gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2016 kohii
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package com.smoothcsv.swing.gridsheet.model;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.Consumer;

import com.smoothcsv.swing.gridsheet.event.GridSheetDataEvent;
import com.smoothcsv.swing.gridsheet.event.GridSheetStructureEvent;
import lombok.Getter;
import lombok.Setter;

/**
 * Grid sheet model backed by a {@code List<List>} of cell values. Keeps the cell data, the
 * {@link GridSheetRow}/{@link GridSheetColumn} structure lists, and notifies registered listeners
 * of data updates and structural changes. While {@code adjusting} is true, individual data-update
 * events are coalesced into a single dirty region that is fired when adjusting ends.
 *
 * @author kohii
 */
public class GridSheetModel implements IGridSheetModel {

  // Sentinels for the coalesced dirty-region bounds: MAX/MIN mean "nothing dirty yet"
  // so that Math.min/Math.max in markAsDirty work without special cases.
  private static final int MIN = -1;
  private static final int MAX = Integer.MAX_VALUE;

  /** Value used to fill cells created by row/column insertion. */
  @Getter
  @Setter
  private Object defaultValue = "";

  /**
   * The <code>List</code> of <code>List</code> of <code>Object</code> values.
   */
  protected List<List> dataList;

  /** Listeners notified of cell-value changes via GridSheetDataEvent. */
  private List<Consumer<GridSheetDataEvent>> dataUpdateListeners = new ArrayList<>();

  /** True while updates are being batched; see {@link #setAdjusting(boolean)}. */
  @Getter
  private boolean adjusting;

  // Accumulated dirty region while adjusting. Reset to the sentinels after firing.
  private int firstAdjustedRowIndex = MAX;
  private int lastAdjustedRowIndex = MIN;
  private int firstAdjustedColumnIndex = MAX;
  private int lastAdjustedColumnIndex = MIN;
  // NOTE(review): "adjustd" looks like a typo for "adjusted"; left as-is (private field).
  private boolean adjustdStructureChanged = false;

  /** Creates an empty model; data must be supplied later via {@link #setDataList(List)}. */
  public GridSheetModel() {}

  /**
   * Creates a model whose row count is {@code dataList.size()} and whose column count is the
   * widest row found in the data.
   *
   * @param dataList the cell data (must be non-null and non-empty)
   */
  public GridSheetModel(List<List> dataList) {
    setDataList(dataList, dataList.size(), getMaxColumnCount(dataList));
  }

  /**
   * Creates a model with explicit row and column counts (which may differ from the actual
   * dimensions of {@code dataList}).
   *
   * @param dataList the cell data (must be non-null and non-empty)
   * @param rowCount number of GridSheetRow entries to create
   * @param columnCount number of GridSheetColumn entries to create
   */
  public GridSheetModel(List<List> dataList, int rowCount, int columnCount) {
    setDataList(dataList, rowCount, columnCount);
  }

  /** Replaces the data; counts are derived from the data itself. */
  public void setDataList(List<List> dataList) {
    setDataList(dataList, dataList.size(), getMaxColumnCount(dataList));
  }

  /**
   * Replaces the backing data, rebuilds the row/column structure lists with defaults, resets the
   * size caches, and fires a CHANGE_DATALIST structure event.
   *
   * @throws IllegalArgumentException if {@code dataList} is null or empty
   */
  public void setDataList(List<List> dataList, int rowCount, int columnCount) {
    if (dataList == null || dataList.isEmpty()) {
      throw new IllegalArgumentException();
    }
    this.dataList = dataList;

    invalidateWidthCache();
    invalidateHeightCache();

    List<GridSheetRow> rows = new ArrayList<>(rowCount);
    for (int i = 0; i < rowCount; i++) {
      rows.add(createDefaultRow());
    }
    this.rows = rows;

    List<GridSheetColumn> columns = new ArrayList<>(columnCount);
    for (int i = 0; i < columnCount; i++) {
      columns.add(createDefaultColumn());
    }
    this.columns = columns;

    fireStructureChanged(GridSheetStructureEvent.CHANGE_DATALIST);
  }

  /** Returns the size of the widest row in {@code dataList} (-1 when the list is empty). */
  private int getMaxColumnCount(List<List> dataList) {
    int columnCount = -1;
    for (int i = 0; i < dataList.size(); i++) {
      columnCount = Math.max(dataList.get(i).size(), columnCount);
    }
    return columnCount;
  }

  /**
   * Returns an attribute value for the cell at <code>row</code> and <code>column</code>.
   * Out-of-range coordinates beyond the data's actual size yield {@code null} rather than an
   * exception (rows may be ragged).
   *
   * @param row the row whose value is to be queried
   * @param column the column whose value is to be queried
   * @return the value Object at the specified cell
   * @throws ArrayIndexOutOfBoundsException if an invalid row or column was given
   */
  public Object getValueAt(int row, int column) {
    if (dataList.size() <= row) {
      return null;
    }
    List rowData = dataList.get(row);
    if (rowData.size() <= column) {
      return null;
    }
    return rowData.get(column);
  }

  /**
   * Sets the object value for the cell at <code>column</code> and <code>row</code>.
   * <code>aValue</code> is the new value. This method will generate a <code>tableChanged</code>
   * notification.
   *
   * @param aValue the new value; this can be null
   * @param row the row whose value is to be changed
   * @param column the column whose value is to be changed
   * @throws ArrayIndexOutOfBoundsException if an invalid row or column was given
   */
  @SuppressWarnings("unchecked")
  public void setValueAt(Object aValue, int row, int column) {
    List<Object> rowData = dataList.get(row);
    rowData.set(column, aValue);
    fireDataUpdated(row, column);
  }

  /** Returns the live (not copied) backing list for the given row. */
  public List getRowDataAt(int rowIndex) {
    return dataList.get(rowIndex);
  }

  /** Replaces an entire row's data and fires an update for that row. */
  protected void setRowDataAt(int rowIndex, Object[] data) {
    dataList.set(rowIndex, new ArrayList<Object>(Arrays.asList(data)));
    fireDataUpdated(rowIndex, 0, rowIndex, getColumnCount(), false);
  }

  //
  // Data change event
  //

  @Override
  public void addValueChangeListener(Consumer<GridSheetDataEvent> l) {
    dataUpdateListeners.add(l);
  }

  @Override
  public void removeValueChangeListener(Consumer<GridSheetDataEvent> l) {
    dataUpdateListeners.remove(l);
  }

  /**
   * Fires a single-cell update, or just extends the dirty region when adjusting.
   */
  protected void fireDataUpdated(int row, int column) {
    if (adjusting) {
      markAsDirty(row, column);
    } else {
      if (!dataUpdateListeners.isEmpty()) {
        GridSheetDataEvent e = new GridSheetDataEvent(this, row, column, row, column);
        for (Consumer<GridSheetDataEvent> l : dataUpdateListeners) {
          l.accept(e);
        }
      }
    }
  }

  /**
   * Fires a region update, or folds the region (and the structure-changed flag) into the pending
   * dirty region when adjusting.
   */
  protected void fireDataUpdated(int firstRow, int firstColumn, int lastRow, int lastColumn,
                                 boolean structureChanged) {
    if (adjusting) {
      markAsDirty(firstRow, firstColumn);
      markAsDirty(lastRow, lastColumn);
      markStructureChanged(structureChanged);
    } else {
      if (!dataUpdateListeners.isEmpty()) {
        GridSheetDataEvent e = new GridSheetDataEvent(this, firstRow, firstColumn, lastRow,
            lastColumn, structureChanged);
        for (Consumer<GridSheetDataEvent> l : dataUpdateListeners) {
          l.accept(e);
        }
      }
    }
  }

  /**
   * Turns update batching on or off. On the transition from true to false, a single event
   * covering the accumulated dirty region is fired and the region is reset.
   *
   * @param adjusting the adjusting to set
   */
  public void setAdjusting(boolean adjusting) {
    boolean old = this.adjusting;
    this.adjusting = adjusting;
    if (old != adjusting && !adjusting) {
      if (!dataUpdateListeners.isEmpty()) {
        GridSheetDataEvent e = new GridSheetDataEvent(this, firstAdjustedRowIndex,
            firstAdjustedColumnIndex, lastAdjustedRowIndex, lastAdjustedColumnIndex,
            adjustdStructureChanged);
        for (Consumer<GridSheetDataEvent> l : dataUpdateListeners) {
          l.accept(e);
        }
      }
      firstAdjustedRowIndex = MAX;
      firstAdjustedColumnIndex = MAX;
      lastAdjustedRowIndex = MIN;
      lastAdjustedColumnIndex = MIN;
      adjustdStructureChanged = false;
    }
  }

  // Updates first and last change indices
  private void markAsDirty(int r, int c) {
    // -1 coordinates (e.g. GridSheetDataEvent sentinel values) are ignored.
    if (r == -1 || c == -1) {
      return;
    }
    firstAdjustedRowIndex = Math.min(firstAdjustedRowIndex, r);
    lastAdjustedRowIndex = Math.max(lastAdjustedRowIndex, r);
    firstAdjustedColumnIndex = Math.min(firstAdjustedColumnIndex, c);
    lastAdjustedColumnIndex = Math.max(lastAdjustedColumnIndex, c);
  }

  /** Latches the structure-changed flag; once set it stays set until the batch is flushed. */
  private void markStructureChanged(boolean structureChanged) {
    adjustdStructureChanged = structureChanged || adjustdStructureChanged;
  }

  //
  // Reflect structure change to data model.
  //

  /** Inserts {@code numColumns} default-valued cells at {@code index} into every row. */
  protected void insertColumnData(int index, int numColumns) {
    int rowCount = getRowCount();
    for (int r = 0; r < rowCount; r++) {
      Object[] elements = new Object[numColumns];
      Arrays.fill(elements, defaultValue);
      dataList.get(r).addAll(index, Arrays.asList(elements));
    }
    fireDataUpdated(0, index, GridSheetDataEvent.TO_THE_END, GridSheetDataEvent.TO_THE_END, true);
  }

  /** Removes {@code numColumns} cells starting at {@code index} from every row. */
  protected void deleteColumnData(int index, int numColumns) {
    int rowCount = getRowCount();
    for (int r = 0; r < rowCount; r++) {
      // subList(..).clear() removes the range from the backing row list in place.
      dataList.get(r).subList(index, index + numColumns).clear();
    }
    fireDataUpdated(0, index, GridSheetDataEvent.TO_THE_END, GridSheetDataEvent.TO_THE_END, true);
  }

  /** Inserts {@code numRows} new rows of default values at {@code index}. */
  protected void insertRowData(int index, int numRows) {
    int columnCount = getColumnCount();
    List[] newData = new List[numRows];
    for (int i = 0; i < newData.length; i++) {
      Object[] elements = new Object[columnCount];
      Arrays.fill(elements, defaultValue);
      newData[i] = new ArrayList<>(Arrays.asList(elements));
    }
    dataList.addAll(index, Arrays.asList(newData));
    fireDataUpdated(index, 0, GridSheetDataEvent.TO_THE_END, GridSheetDataEvent.TO_THE_END, true);
  }

  /** Removes {@code numRows} rows of data starting at {@code index}. */
  protected void deleteRowData(int index, int numRows) {
    dataList.subList(index, index + numRows).clear();
    fireDataUpdated(index, 0, GridSheetDataEvent.TO_THE_END, GridSheetDataEvent.TO_THE_END, true);
  }

  // ////////////////////////////////////////////////////////////////////////////////
  // Structure
  // ////////////////////////////////////////////////////////////////////////////////

  private List<Consumer<GridSheetStructureEvent>> structureChangelistenerList = new ArrayList<>();

  protected List<GridSheetColumn> columns;
  protected List<GridSheetRow> rows;

  // Pixel-size constraints and defaults for columns/rows.
  @Getter
  @Setter
  private int minColumnWidth = 8;
  @Getter
  @Setter
  private int maxColumnWidth = 1000;
  @Getter
  @Setter
  private int minRowHeight = 1;
  @Getter
  @Setter
  private int maxRowHeight = 300;
  @Getter
  @Setter
  private int defaultColumnWidth = 60;
  @Getter
  @Setter
  private int defaultRowHeight = 17;

  // Cached totals; -1 means "recompute on next access".
  private int totalColumnWidth = -1;
  private int totalRowHeight = -1;

  @Override
  public GridSheetColumn getColumn(int index) {
    return columns.get(index);
  }

  @Override
  public GridSheetRow getRow(int index) {
    return rows.get(index);
  }

  /**
   * @return the columns
   */
  public List<GridSheetColumn> getColumns() {
    return Collections.unmodifiableList(columns);
  }

  /**
   * @return the rows
   */
  public List<GridSheetRow> getRows() {
    return Collections.unmodifiableList(rows);
  }

  @Override
  public int getColumnCount() {
    return columns.size();
  }

  @Override
  public int getRowCount() {
    return rows.size();
  }

  @Override
  public void addColumn(GridSheetColumn column) {
    insertColumn(getColumnCount(), column);
  }

  @Override
  public void addColumn(GridSheetColumn[] column) {
    insertColumn(getColumnCount(), column);
  }

  @Override
  public void addColumn(int numColumns) {
    insertColumn(getColumnCount(), numColumns);
  }

  @Override
  public void insertColumn(int index, GridSheetColumn column) {
    columns.add(index, column);
    fireColumnsInserted(index, new GridSheetColumn[]{column}, true);
  }

  @Override
  public void insertColumn(int index, GridSheetColumn[] column) {
    columns.addAll(index, Arrays.asList(column));
    fireColumnsInserted(index, column, true);
  }

  @Override
  public void insertColumn(int index, int numColumns) {
    GridSheetColumn[] column = new GridSheetColumn[numColumns];
    for (int i = 0; i < column.length; i++) {
      column[i] = createDefaultColumn();
    }
    insertColumn(index, column);
  }

  @Override
  public GridSheetColumn deleteColumn(int index) {
    GridSheetColumn column = columns.remove(index);
    fireColumnsDeleted(index, new GridSheetColumn[]{column});
    return column;
  }

  /** Removes a contiguous range of columns and returns them in original order. */
  public GridSheetColumn[] deleteColumn(int index, int numColumns) {
    GridSheetColumn[] arrayColumnsRemoved = new GridSheetColumn[numColumns];
    for (int i = 0; i < arrayColumnsRemoved.length; i++) {
      arrayColumnsRemoved[i] = columns.get(index + i);
    }
    columns.subList(index, index + numColumns).clear();
    fireColumnsDeleted(index, arrayColumnsRemoved);
    return arrayColumnsRemoved;
  }

  @Override
  public void addRow(GridSheetRow row) {
    insertRow(getRowCount(), row);
  }

  @Override
  public void addRow(GridSheetRow[] row) {
    insertRow(getRowCount(), row);
  }

  @Override
  public void addRow(int numRows) {
    insertRow(getRowCount(), numRows);
  }

  @Override
  public void insertRow(int index, GridSheetRow row) {
    rows.add(index, row);
    fireRowsInserted(index, new GridSheetRow[]{row});
  }

  @Override
  public void insertRow(int index, GridSheetRow[] row) {
    rows.addAll(index, Arrays.asList(row));
    fireRowsInserted(index, row);
  }

  @Override
  public void insertRow(int index, int numRows) {
    GridSheetRow[] row = new GridSheetRow[numRows];
    for (int i = 0; i < row.length; i++) {
      row[i] = createDefaultRow();
    }
    insertRow(index, row);
  }

  @Override
  public GridSheetRow deleteRow(int index) {
    GridSheetRow row = rows.remove(index);
    fireRowsDeleted(index, new GridSheetRow[]{row});
    return row;
  }

  /** Removes a contiguous range of rows and returns them in original order. */
  public GridSheetRow[] deleteRow(int index, int numRows) {
    GridSheetRow[] arrayRowsRemoved = new GridSheetRow[numRows];
    for (int i = 0; i < arrayRowsRemoved.length; i++) {
      arrayRowsRemoved[i] = rows.get(index + i);
    }
    rows.subList(index, index + numRows).clear();
    fireRowsDeleted(index, arrayRowsRemoved);
    return arrayRowsRemoved;
  }

  // Width and Heidht ---------------------------

  @Override
  public int getTotalColumnWidth() {
    // Lazily recompute after invalidateWidthCache().
    if (totalColumnWidth == -1) {
      totalColumnWidth = 0;
      for (int i = 0; i < columns.size(); i++) {
        totalColumnWidth += columns.get(i).getWidth();
      }
    }
    return totalColumnWidth;
  }

  protected void invalidateWidthCache() {
    totalColumnWidth = -1;
  }

  @Override
  public int getTotalRowHeight() {
    // Lazily recompute after invalidateHeightCache().
    if (totalRowHeight == -1) {
      totalRowHeight = 0;
      for (int i = 0; i < rows.size(); i++) {
        totalRowHeight += rows.get(i).getHeight();
      }
    }
    return totalRowHeight;
  }

  protected void invalidateHeightCache() {
    totalRowHeight = -1;
  }

  // Name ------------------------

  /** Display name for a column: 1-based index as a string. */
  public String getColumnName(int column) {
    return String.valueOf(column + 1);
  }

  /** Display name for a row: 1-based index as a string. */
  public String getRowName(int row) {
    return String.valueOf(row + 1);
  }

  /** Returns display names for all columns, in order. */
  public List<String> getColumnNames() {
    int columnCount = getColumnCount();
    List<String> columnNames = new ArrayList<>(columnCount);
    for (int i = 0; i < columnCount; i++) {
      columnNames.add(getColumnName(i));
    }
    return columnNames;
  }

  // Events -------------------------

  @Override
  public void addStructureChangeListener(Consumer<GridSheetStructureEvent> l) {
    structureChangelistenerList.add(l);
  }

  @Override
  public void removeStructureChangeListener(Consumer<GridSheetStructureEvent> l) {
    structureChangelistenerList.remove(l);
  }

  /**
   * Mirrors a column insertion into the data (optionally) and notifies structure listeners.
   *
   * @param createData when true, also insert matching cells into every data row
   */
  protected void fireColumnsInserted(int index, GridSheetColumn[] columnsInserted,
                                     boolean createData) {
    if (createData) {
      insertColumnData(index, columnsInserted.length);
    }
    invalidateWidthCache();
    if (!structureChangelistenerList.isEmpty()) {
      GridSheetStructureEvent e =
          new GridSheetStructureEvent(this, GridSheetStructureEvent.INSERT_COLUMN, adjusting,
              index, columnsInserted.length, getRowCount(), getColumnCount());
      for (Consumer<GridSheetStructureEvent> l : structureChangelistenerList) {
        l.accept(e);
      }
    }
  }

  /** Mirrors a column deletion into the data and notifies structure listeners. */
  protected void fireColumnsDeleted(int index, GridSheetColumn[] columnsRemoved) {
    deleteColumnData(index, columnsRemoved.length);
    invalidateWidthCache();
    if (!structureChangelistenerList.isEmpty()) {
      GridSheetStructureEvent e =
          new GridSheetStructureEvent(this, GridSheetStructureEvent.REMOVE_COLUMN, adjusting,
              index, columnsRemoved.length, getRowCount(), getColumnCount());
      for (Consumer<GridSheetStructureEvent> l : structureChangelistenerList) {
        l.accept(e);
      }
    }
  }

  /**
   * Mirrors a row insertion into the data and notifies structure listeners.
   * NOTE(review): unlike fireColumnsInserted, the notification is skipped while adjusting —
   * possibly intentional, but the asymmetry is worth confirming.
   */
  protected void fireRowsInserted(int index, GridSheetRow[] rowsInserted) {
    insertRowData(index, rowsInserted.length);
    invalidateHeightCache();
    if (!adjusting) {
      if (!structureChangelistenerList.isEmpty()) {
        GridSheetStructureEvent e =
            new GridSheetStructureEvent(this, GridSheetStructureEvent.INSERT_ROW, adjusting,
                index, rowsInserted.length, getRowCount(), getColumnCount());
        for (Consumer<GridSheetStructureEvent> l : structureChangelistenerList) {
          l.accept(e);
        }
      }
    }
  }

  /** Mirrors a row deletion into the data and notifies structure listeners. */
  protected void fireRowsDeleted(int index, GridSheetRow[] rowsRemoved) {
    deleteRowData(index, rowsRemoved.length);
    invalidateHeightCache();
    if (!structureChangelistenerList.isEmpty()) {
      GridSheetStructureEvent e =
          new GridSheetStructureEvent(this, GridSheetStructureEvent.REMOVE_ROW, adjusting, index,
              rowsRemoved.length, getRowCount(), getColumnCount());
      for (Consumer<GridSheetStructureEvent> l : structureChangelistenerList) {
        l.accept(e);
      }
    }
  }

  public void fireVisibleColumnsUpdated() {
    invalidateWidthCache();
    fireStructureChanged(GridSheetStructureEvent.UPDATE_VISIBLE_COLUMNS);
  }

  public void fireVisibleRowsUpdated() {
    invalidateHeightCache();
    fireStructureChanged(GridSheetStructureEvent.UPDATE_VISIBLE_ROWS);
  }

  public void fireWidthUpdated() {
    invalidateWidthCache();
    fireStructureChanged(GridSheetStructureEvent.UPDATE_WIDTH);
  }

  public void fireHeightUpdated() {
    invalidateHeightCache();
    fireStructureChanged(GridSheetStructureEvent.UPDATE_HEIGHT);
  }

  /** Notifies structure listeners of a typed change that carries no index range. */
  protected void fireStructureChanged(int type) {
    if (!structureChangelistenerList.isEmpty()) {
      GridSheetStructureEvent e = new GridSheetStructureEvent(this, type, adjusting);
      for (Consumer<GridSheetStructureEvent> l : structureChangelistenerList) {
        l.accept(e);
      }
    }
  }

  protected GridSheetColumn createDefaultColumn() {
    return new GridSheetColumn(defaultColumnWidth, this);
  }

  protected GridSheetColumn createDefaultColumn(long id) {
    return new GridSheetColumn(id, defaultColumnWidth, this);
  }

  protected GridSheetRow createDefaultRow() {
    return new GridSheetRow(this);
  }

  // for SmoothCSV ////////////////////////////////////////

  /** Returns the actual cell count of the given data row (rows may be ragged). */
  public int getColumnCountAt(int rowIndex) {
    return dataList.get(rowIndex).size();
  }

  /**
   * Permutes all rows: the row currently at position i moves to position order[i]. Both the data
   * and the GridSheetRow structure list are permuted, then a SORT_ROWS event is fired.
   *
   * @throws IllegalArgumentException if {@code order.length != rowCount}
   */
  public void sort(int[] order) {
    int len = dataList.size();
    if (len != order.length) {
      throw new IllegalArgumentException();
    }
    List[] newData = new List[len];
    for (int i = 0; i < order.length; i++) {
      newData[order[i]] = dataList.get(i);
    }
    dataList = new ArrayList<List>(Arrays.asList(newData));
    GridSheetRow[] newRows = new GridSheetRow[len];
    for (int i = 0; i < order.length; i++) {
      newRows[order[i]] = rows.get(i);
    }
    rows = new ArrayList<GridSheetRow>(Arrays.asList(newRows));
    fireStructureChanged(GridSheetStructureEvent.SORT_ROWS);
  }

  /**
   * Permutes only the rows listed in {@code targetRows}, leaving other rows in place.
   * NOTE(review): scratch arrays are sized {@code len} (total row count) but indexed by
   * {@code order[i]} / {@code i}; this relies on order values staying below len — confirm
   * against the callers' contract.
   */
  public void sort(int[] order, int[] targetRows) {
    int len = dataList.size();
    if (len < order.length) {
      throw new IllegalArgumentException();
    } else if (order.length != targetRows.length) {
      throw new IllegalArgumentException();
    }

    // TODO improve
    List<List> targetDataList = new ArrayList<>();
    for (int i = 0; i < targetRows.length; i++) {
      int r = targetRows[i];
      targetDataList.add(dataList.get(r));
    }
    List[] newTargetData = new List[len];
    for (int i = 0; i < order.length; i++) {
      newTargetData[order[i]] = targetDataList.get(i);
    }
    for (int i = 0; i < targetRows.length; i++) {
      int r = targetRows[i];
      dataList.set(r, newTargetData[i]);
    }

    List<GridSheetRow> targetRowList = new ArrayList<>();
    for (int i = 0; i < targetRows.length; i++) {
      int r = targetRows[i];
      targetRowList.add(rows.get(r));
    }
    GridSheetRow[] newTargetRowList = new GridSheetRow[len];
    for (int i = 0; i < order.length; i++) {
      newTargetRowList[order[i]] = targetRowList.get(i);
    }
    for (int i = 0; i < targetRows.length; i++) {
      int r = targetRows[i];
      rows.set(r, newTargetRowList[i]);
    }
    fireStructureChanged(GridSheetStructureEvent.SORT_ROWS);
  }

  /**
   * Permutes the values of a rectangular cell region (data only; row objects are untouched).
   * Updates are batched via setAdjusting so listeners see one coalesced event covering the
   * region's corners.
   */
  public void sort(int[] order, CellRect targetCells) {
    setAdjusting(true);
    List<List> targetDataList = new ArrayList<>(targetCells.getNumRows());
    for (int r = targetCells.getRow(); r <= targetCells.getLastRow(); r++) {
      List rowData = new ArrayList(targetCells.getNumColumns());
      for (int c = targetCells.getColumn(); c <= targetCells.getLastColumn(); c++) {
        Object v = getValueAt(r, c);
        rowData.add(v);
      }
      targetDataList.add(rowData);
    }
    List[] newTargetData = new List[targetCells.getNumRows()];
    for (int i = 0; i < order.length; i++) {
      newTargetData[order[i]] = targetDataList.get(i);
    }
    for (int r = 0; r < targetCells.getNumRows(); r++) {
      for (int c = 0; c < targetCells.getNumColumns(); c++) {
        getRowDataAt(r + targetCells.getRow()).set(c + targetCells.getColumn(),
            newTargetData[r].get(c));
      }
    }
    // Mark both corners so the coalesced dirty region covers the whole rectangle.
    fireDataUpdated(targetCells.getRow(), targetCells.getColumn());
    fireDataUpdated(targetCells.getLastRow(), targetCells.getLastColumn());
    setAdjusting(false);
  }
}
package ru.matevosyan.json.entity;

import java.sql.Timestamp;

/**
 * Flat data-transfer object that aggregates offer, user and car attributes into one structure,
 * collected together to send to the client as JSON.
 *
 * <p>NOTE(review): "tittle" is a typo for "title", but the accessor names are part of the
 * public/serialized API, so they are kept unchanged.</p>
 */
public class JsonResponse {

    // --- Offer attributes ---
    private Integer offerId;
    private String tittle;
    private String description;
    private String picture;
    private Boolean soldState;
    private Timestamp postingDate;
    private String address;
    private Integer price;

    // --- User attributes ---
    private String name;
    private String phoneNumber;
    private String city;
    private Integer userId;
    private String role;

    // --- Car attributes ---
    private Timestamp yearOfManufacture;
    private String modelVehicle;
    private String gearBox;
    private Float engineCapacity;
    private String bodyType;
    private String brand;

    /**
     * Creates an empty response; populate it through the setters.
     */
    public JsonResponse() {
    }

    // ------------------------------------------------------------------
    // Offer accessors
    // ------------------------------------------------------------------

    /**
     * @return offer id.
     */
    public Integer getOfferId() {
        return this.offerId;
    }

    /**
     * @param offerId Integer offer identifier.
     */
    public void setOfferId(Integer offerId) {
        this.offerId = offerId;
    }

    /**
     * @return offer tittle.
     */
    public String getTittle() {
        return this.tittle;
    }

    /**
     * @param tittle offer title text.
     */
    public void setTittle(String tittle) {
        this.tittle = tittle;
    }

    /**
     * @return offer description.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * @param description offer description text.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return offer picture.
     */
    public String getPicture() {
        return this.picture;
    }

    /**
     * @param picture offer picture reference.
     */
    public void setPicture(String picture) {
        this.picture = picture;
    }

    /**
     * @return offer soldState.
     */
    public Boolean getSoldState() {
        return this.soldState;
    }

    /**
     * @param soldState whether the offer has been sold.
     */
    public void setSoldState(Boolean soldState) {
        this.soldState = soldState;
    }

    /**
     * @return offer posting date.
     */
    public Timestamp getPostingDate() {
        return this.postingDate;
    }

    /**
     * @param postingDate moment the offer was posted.
     */
    public void setPostingDate(Timestamp postingDate) {
        this.postingDate = postingDate;
    }

    /**
     * @return offer address.
     */
    public String getAddress() {
        return this.address;
    }

    /**
     * @param address offer location.
     */
    public void setAddress(String address) {
        this.address = address;
    }

    /**
     * @return offer price.
     */
    public Integer getPrice() {
        return this.price;
    }

    /**
     * @param price asking price.
     */
    public void setPrice(Integer price) {
        this.price = price;
    }

    // ------------------------------------------------------------------
    // User accessors
    // ------------------------------------------------------------------

    /**
     * @return user name.
     */
    public String getName() {
        return this.name;
    }

    /**
     * @param name seller's name.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return user phone number.
     */
    public String getPhoneNumber() {
        return this.phoneNumber;
    }

    /**
     * @param phoneNumber seller's phone number.
     */
    public void setPhoneNumber(String phoneNumber) {
        this.phoneNumber = phoneNumber;
    }

    /**
     * @return user city.
     */
    public String getCity() {
        return this.city;
    }

    /**
     * @param city seller's city.
     */
    public void setCity(String city) {
        this.city = city;
    }

    /**
     * @return user id.
     */
    public Integer getUserId() {
        return this.userId;
    }

    /**
     * @param userId seller's identifier.
     */
    public void setUserId(Integer userId) {
        this.userId = userId;
    }

    /**
     * @return user role.
     */
    public String getRole() {
        return this.role;
    }

    /**
     * @param role seller's role.
     */
    public void setRole(String role) {
        this.role = role;
    }

    // ------------------------------------------------------------------
    // Car accessors
    // ------------------------------------------------------------------

    /**
     * @return car year of manufacture.
     */
    public Timestamp getYearOfManufacture() {
        return this.yearOfManufacture;
    }

    /**
     * @param yearOfManufacture car manufacture date.
     */
    public void setYearOfManufacture(Timestamp yearOfManufacture) {
        this.yearOfManufacture = yearOfManufacture;
    }

    /**
     * @return car model vehicle.
     */
    public String getModelVehicle() {
        return this.modelVehicle;
    }

    /**
     * @param modelVehicle vehicle model name.
     */
    public void setModelVehicle(String modelVehicle) {
        this.modelVehicle = modelVehicle;
    }

    /**
     * @return car gearBox.
     */
    public String getGearBox() {
        return this.gearBox;
    }

    /**
     * @param gearBox gearbox type.
     */
    public void setGearBox(String gearBox) {
        this.gearBox = gearBox;
    }

    /**
     * @return car engine capacity.
     */
    public Float getEngineCapacity() {
        return this.engineCapacity;
    }

    /**
     * @param engineCapacity engine displacement.
     */
    public void setEngineCapacity(Float engineCapacity) {
        this.engineCapacity = engineCapacity;
    }

    /**
     * @return car body type.
     */
    public String getBodyType() {
        return this.bodyType;
    }

    /**
     * @param bodyType car body style.
     */
    public void setBodyType(String bodyType) {
        this.bodyType = bodyType;
    }

    /**
     * @return car brand.
     */
    public String getBrand() {
        return this.brand;
    }

    /**
     * @param brand car brand name.
     */
    public void setBrand(String brand) {
        this.brand = brand;
    }
}
// **********************************************************************
//
// <copyright>
//
// BBN Technologies
// 10 Moulton Street
// Cambridge, MA 02138
// (617) 873-8000
//
// Copyright (C) BBNT Solutions LLC. All rights reserved.
//
// </copyright>
// **********************************************************************
//
// $Source: /cvs/distapps/openmap/src/openmap/com/bbn/openmap/omGraphics/util/ArcCalc.java,v $
// $RCSfile: ArcCalc.java,v $
// $Revision: 1.5 $
// $Date: 2005/08/10 22:28:13 $
// $Author: dietrick $
//
// **********************************************************************

package com.bbn.openmap.omGraphics.util;

import java.awt.Graphics;
import java.awt.Point;
import java.awt.geom.Point2D;
import java.io.Serializable;

import com.bbn.openmap.MoreMath;
import com.bbn.openmap.omGraphics.OMColor;
import com.bbn.openmap.omGraphics.OMGraphicList;
import com.bbn.openmap.omGraphics.OMLine;
import com.bbn.openmap.omGraphics.OMRect;
import com.bbn.openmap.proj.Projection;
import com.bbn.openmap.util.Debug;

/**
 * A class that calculates an arc between two points, given the point
 * coordinates, and an arc measurement that represents, in radians,
 * the length of the part of the circle that should be represented by
 * the arc.
 */
public class ArcCalc implements Serializable {

    /** Debugging list showing algorithm points. */
    protected transient OMGraphicList arcGraphics = null;

    // Output coordinate arrays filled by generate(); read via getXPoints()/getYPoints().
    protected transient float[] xpoints;
    protected transient float[] ypoints;

    /**
     * This setting is the amount of an angle, limited to a
     * semi-circle (PI) that the curve will represent. In other words,
     * the arc between the two end points is going to look like a 0
     * degrees of a circle (straight line, which is the default), or
     * 180 degrees of a circle (full semi-circle). Given in radians,
     * though, not degrees. OK?
     */
    protected double arcAngle = 0;

    /**
     * For x-y and offset lines that have an arc drawn between them,
     * tell which way the arc should be drawn, toward the Equator, or
     * away from it, generally. Default is true, to make it look like
     * great circle line for northern hemisphere lines.
     */
    protected boolean arcUp = true;

    /**
     * Set to true if the points for the arc line up from x2, y2 to
     * x1, y1
     */
    protected boolean reversed = false;

    /**
     * Set the arc that is drawn between the points of a x-y or offset
     * line. If the arc amount is negative, the arc will be flipped
     * over.
     *
     * @param aa arcAngle, in radians, between 0-PI.
     * @param putArcUp arc peak above points.
     */
    public ArcCalc(double aa, boolean putArcUp) {
        arcAngle = aa;
        arcUp = putArcUp;

        // If it's negative, flip it over...
        if (aa < 0) {
            arcAngle *= -1.0;
            arcUp = !arcUp;
        }

        // Clamp to a semi-circle; larger values are not meaningful here.
        if (arcAngle > Math.PI) {
            arcAngle = Math.PI;
        }
    }

    /**
     * Return the arc angle set for this line. Will only be set if it
     * was set externally.
     *
     * @return arc angle in radians.
     */
    public double getArcAngle() {
        return arcAngle;
    }

    /**
     * Returns true if the arc direction setting is upward, meaning
     * that the peak of the arc is above (or more so) the line that
     * goes between the two points.
     */
    public boolean isArcUp() {
        return arcUp;
    }

    /**
     * Generate the points that will generate the curved line between
     * two points. The arcAngle is the number of radians of a circle
     * that the arc should represent. Math.PI is the Max. The
     * setArcAngle should be called before this method is called, so
     * that the method knows what to create.
     *
     * NOTE(review): when x1 == x2 the slope computation below divides by
     * zero (Math.atan of infinity is well-defined, but integer 0/0 when
     * both deltas are zero would throw) — presumably callers never pass
     * coincident points; confirm.
     */
    public void generate(int x1, int y1, int x2, int y2) {

        // The algorithm.
        //
        // Draw a straight line between the points, and figure out the
        // center point between them on the line. Then, on another
        // line that is perpendicular to the first line, figure out
        // where the point is that will act as a center of a circle.
        // That circle needs to pass through both points, and the
        // radius is such that the arc angle of the circle between the
        // points is the same as the arcAngle set for the ArcCalc.
        // Then, the arc needs to be generated. This is done by
        // looking at the circle, and figuring out the angle (from 0
        // to 2PI) that the line from the center to point 1, and then
        // the center to point 2. This gives us the angular extents
        // of the arc. Then we need to figure out the angle
        // increments needed to get good coordinates for the arc.
        // Then, starting at the low arc angle, we increment it to get
        // the coordinates for the arced line, a given radius away
        // from the circle center, between the arc angle extents.

        Point midPoint = new Point();
        Point arcCenter = new Point();
        Point2D peakPoint = new Point2D.Float();

        // pixel distance between points.
        double distance = Math.sqrt(Math.pow(Math.abs(y2 - y1), 2.0)
                + Math.pow(Math.abs(x2 - x1), 2.0));

        // slope of straight line between points.
        double straightLineSlope = Math.atan((double) (y2 - y1)
                / (double) (x2 - x1));

        // slope of line that the arc focus will reside on.
        double inverseSlope = straightLineSlope - (Math.PI / 2.0);

        if (Debug.debugging("arc")) {
            Debug.output("ArcCalc.generate: Slope is "
                    + Math.toDegrees(straightLineSlope)
                    + " degrees, distance = " + distance + " pixels.");
        }

        // centerX/Y is the midpoint between the two points.
        midPoint.setLocation(x1 + ((x2 - x1) / 2), y1 + ((y2 - y1) / 2));

        if (Debug.debugging("arc")) {
            Debug.output("ArcCalc.generate: Center point for (" + x1 + ", "
                    + y1 + ") to (" + x2 + ", " + y2 + ") is (" + midPoint.x
                    + ", " + midPoint.y + ")");
        }

        // Chord-to-radius relation: r = d / sqrt(2 * (1 - cos(theta))).
        // The arccos != 1.0 guard avoids dividing by zero when arcAngle == 0.
        double arccos = Math.cos(arcAngle);
        double arcRadius;
        if (arccos != 1.0) {
            arcRadius = distance / Math.sqrt(2.0 * (1.0 - Math.cos(arcAngle)));
        } else {
            arcRadius = distance / Math.sqrt(2.0);
        }

        if (Debug.debugging("arc")) {
            Debug.output("ArcCalc.generate: radius of arc = " + arcRadius);
        }

        // R' is the distance down the inverse negative slope of the
        // line that the focus of the arc is located.
        // x is the distance along the right leg of the arc that is
        // left over after Rcos(arcAngle) is subtracted from it, in
        // order to derive the angle of the straight line between the
        // two points.
        double x = arcRadius - arcRadius * Math.cos(arcAngle);
        double rPrime = (distance / 2.0)
                * (Math.sqrt(1.0 - Math.pow(x / distance, 2.0)))
                / Math.sin(arcAngle / 2.0);

        if (Debug.debugging("arc")) {
            Debug.output("ArcCalc.generate: rPrime = " + rPrime);
        }

        // Which side of the chord the circle center sits on.
        int direction = 1;
        if (arcUp)
            direction = -1;

        // arcCenter.x and arcCenter.y are the coordinates of the
        // focus of the Arc.
        arcCenter.x = midPoint.x
                + (direction * (int) (rPrime * Math.cos(inverseSlope)));
        arcCenter.y = midPoint.y
                + (direction * (int) (rPrime * Math.sin(inverseSlope)));

        if (Debug.debugging("arc")) {
            Debug.output("ArcCalc.generateArc: creating supplimental graphics list");
            arcGraphics = new OMGraphicList();

            double dist1 = Math.sqrt(Math.pow((double) (arcCenter.x - x1), 2.0)
                    + Math.pow((double) (arcCenter.y - y1), 2.0));
            double dist2 = Math.sqrt(Math.pow((double) (arcCenter.x - x2), 2.0)
                    + Math.pow((double) (arcCenter.y - y2), 2.0));

            Debug.output("ArcCalc.generate: Center focus for arc is ("
                    + arcCenter.x + ", " + arcCenter.y
                    + ") along slope line of " + Math.toDegrees(inverseSlope)
                    + " degrees).");
            Debug.output("ArcCalc.generate: Distance to point 1 from arc focus = "
                    + dist1
                    + "\n          Distance to point 2 from arc focus = "
                    + dist2);

            // Let's highlight the end points.
            OMRect point1 = new OMRect(x1 - 1, y1 - 1, x1 + 1, y1 + 1);
            OMRect point2 = new OMRect(x2 - 1, y2 - 1, x2 + 1, y2 + 1);
            OMRect arcPoint = new OMRect(arcCenter.x - 1, arcCenter.y - 1, arcCenter.x + 1, arcCenter.y + 1);
            point1.setLinePaint(OMColor.red);
            point2.setLinePaint(OMColor.red);
            arcPoint.setLinePaint(OMColor.blue);
            arcGraphics.add(point1);
            arcGraphics.add(point2);
            arcGraphics.add(arcPoint);

            OMLine line1 = new OMLine(x1, y1, x2, y2);
            OMLine line2 = new OMLine(midPoint.x, midPoint.y, arcCenter.x, arcCenter.y);
            arcGraphics.add(line1);
            arcGraphics.add(line2);
        }

        int realCount = 0;

        // Figure out the arc extents for each endpoint. I think
        // it's easier to keep track of the angles if they are always
        // positive, and we always go from smaller to larger.
        double startSlope = getRealAngle((float) arcCenter.getX(), (float) arcCenter.getY(), x1, y1);
        double endSlope = getRealAngle((float) arcCenter.getX(), (float) arcCenter.getY(), x2, y2);

        double smallSlope, largeSlope;
        double angleIncrement;

        smallSlope = (startSlope > endSlope) ? endSlope : startSlope;
        largeSlope = (smallSlope == startSlope) ? endSlope : startSlope;

        // Have to make sure we take the smaller arc around the
        // circle.
        while (Math.abs(smallSlope - largeSlope) > Math.PI) {
            if (Math.abs(largeSlope - smallSlope - Math.PI) < .001) {
                // Catch 180 degree angles that are close enough...
                break;
            }
            Debug.message("arc",
                    "ArcCalc.generate: Modifying the starting slope.");
            // Rotate the extents by a full turn so the traversal spans the short way.
            double tmpSlope = smallSlope + MoreMath.TWO_PI;
            smallSlope = largeSlope;
            largeSlope = tmpSlope;
        }

        // Experienced some trouble with vertical and horizontal half
        // circles. This took care of that.
        if (MoreMath.approximately_equal(arcAngle, Math.PI) && arcUp) {
            Debug.message("arc",
                    "ArcCalc.generate: Modifying 180 angle points.");
            double tmpSlope = smallSlope + MoreMath.TWO_PI;
            smallSlope = largeSlope;
            largeSlope = tmpSlope;
        }

        // Figure out the angle increment for grabbing coordinates -
        // use the larger dimension of the arc end point differences.
        // NOTE(review): if x1 == x2 AND y1 == y2 this divides by zero;
        // presumably coincident endpoints never reach here — confirm.
        if (Math.abs(y2 - y1) < Math.abs(x2 - x1)) {
            angleIncrement = Math.PI / Math.abs(x2 - x1);
        } else {
            angleIncrement = Math.PI / Math.abs(y2 - y1);
        }

        // +2 leaves headroom for rounding in the extent traversal below.
        int numPoints = (int) (Math.abs(smallSlope - largeSlope) / angleIncrement + 2);
        float[] xPoints = new float[numPoints];
        float[] yPoints = new float[numPoints];

        if (Debug.debugging("arc")) {
            Debug.output("ArcCalc.generate: angle to x1, y1 is " + startSlope
                    + " (" + Math.toDegrees(startSlope)
                    + " degrees), angle to x2, y2 is " + endSlope + " ("
                    + Math.toDegrees(endSlope) + " degrees)");
            Debug.output("ArcCalc.generate: Starting angle is " + smallSlope
                    + "(" + Math.toDegrees(smallSlope)
                    + " degrees), end angle is " + largeSlope + " ("
                    + Math.toDegrees(largeSlope) + " degrees), incrementing by "
                    + angleIncrement + " (" + Math.toDegrees(angleIncrement)
                    + " degrees)");
        }

        reversed = false;

        // Get the coordinates of the arc from the arc extents.
        while (smallSlope < largeSlope && realCount < numPoints) {
            xPoints[realCount] = arcCenter.x + (int) (arcRadius * Math.cos(smallSlope));
            yPoints[realCount] = arcCenter.y + (int) (arcRadius * Math.sin(smallSlope));

            // If the first generated point lands on (x2, ...), the arc runs
            // from point 2 back to point 1.
            if (realCount == 0 && xPoints[realCount] == x2) {
                Debug.message("arc", "ArcCalc: line reversed");
                reversed = true;
            }

            if (Debug.debugging("arc") && realCount == 0) {
                OMLine startLine = new OMLine(arcCenter.x, arcCenter.y, (int) xPoints[0], (int) yPoints[0]);
                startLine.setLinePaint(OMColor.white);
                arcGraphics.add(startLine);
            } else if (Debug.debugging("arcdetail")) {
                Debug.output(" angle " + smallSlope + " ("
                        + smallSlope * 180 / Math.PI + " degrees) = "
                        + xPoints[realCount] + ", " + yPoints[realCount]);
            }

            if (Math.abs(largeSlope - smallSlope - (arcAngle / 2.0)) < angleIncrement) {
                // Found the halfway point, mark it...
                peakPoint.setLocation(xPoints[realCount], yPoints[realCount]);
                Debug.message("arc", "ArcCalc: Found a midpoint.");
            }

            smallSlope += angleIncrement;
            realCount++;
        }

        // Give the coordinates to the OMLine.
        xpoints = new float[realCount];
        ypoints = new float[realCount];
        System.arraycopy(xPoints, 0, xpoints, 0, realCount);
        System.arraycopy(yPoints, 0, ypoints, 0, realCount);
    }

    /**
     * Given the straight line between two points, figure out the
     * angle, in radians, of that line in relation to the coordinate
     * system on the screen. Always returns a positive value, and the
     * angle is from point 1 to point 2.
     */
    protected double getRealAngle(float x1, float y1, float x2, float y2) {
        double angle = 0;
        double horDiff = (double) (x2 - x1);
        double vertDiff = (double) (y2 - y1);

        // If there is no horizontal difference, then it's pointing
        // up or down.
        if (horDiff == 0) {
            if (vertDiff > 0) {
                angle = MoreMath.HALF_PI;
            } else if (vertDiff < 0) {
                angle = -MoreMath.HALF_PI;
            }
        } else {
            angle = Math.atan(vertDiff / horDiff);

            // It's pointed in the wrong direction... fix it here.
            // atan only covers (-PI/2, PI/2); shift into the correct quadrant.
            if (horDiff < 0) {
                angle += Math.PI;
            }
        }

        // Either way, I think we want to make the angle positive.
        while (angle < 0) {
            angle += MoreMath.TWO_PI;
        }

        return angle;
    }

    /** Arc x coordinates from the last generate() call (null before then). */
    public float[] getXPoints() {
        return xpoints;
    }

    /** Arc y coordinates from the last generate() call (null before then). */
    public float[] getYPoints() {
        return ypoints;
    }

    /** Projects the debug graphics, if any were created. */
    public void generate(Projection proj) {
        if (proj != null && arcGraphics != null) {
            arcGraphics.generate(proj);
        }
    }

    /** Renders the debug graphics, if any were created. */
    public void render(Graphics g) {
        if (arcGraphics != null) {
            Debug.output("OMLine rendering " + arcGraphics.size()
                    + " arcGraphics.");
            arcGraphics.render(g);
        }
    }

    /** Returns the debug graphics list, or an empty list if none was built. */
    public OMGraphicList getArcGraphics() {
        if (arcGraphics == null) {
            return new OMGraphicList();
        } else {
            return arcGraphics;
        }
    }

    /** True when the generated points run from (x2, y2) toward (x1, y1). */
    public boolean getReversed() {
        return reversed;
    }
}
/* * Copyright (c) 2005-2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.deployment.synchronizer.git; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.eclipse.jgit.api.*; import org.eclipse.jgit.api.errors.*; import org.eclipse.jgit.storage.file.FileRepository; import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider; import org.wso2.carbon.deployment.synchronizer.ArtifactRepository; import org.wso2.carbon.deployment.synchronizer.DeploymentSynchronizerException; import org.wso2.carbon.deployment.synchronizer.RepositoryManager; import org.wso2.carbon.deployment.synchronizer.git.internal.AbstractBehaviour; import org.wso2.carbon.deployment.synchronizer.git.internal.GitDeploymentSynchronizerConstants; import org.wso2.carbon.deployment.synchronizer.git.internal.GitDeploymentSyncronizerConfiguration; import org.wso2.carbon.deployment.synchronizer.git.repository_creator.GitBlitBasedRepositoryCreator; import org.wso2.carbon.deployment.synchronizer.git.repository_creator.SCMBasedRepositoryCreator; import org.wso2.carbon.deployment.synchronizer.git.stratos2.S2Behaviour; import org.wso2.carbon.deployment.synchronizer.git.stratos2.S2GitRepositoryManager; import org.wso2.carbon.deployment.synchronizer.git.util.CarbonUtilities; import org.wso2.carbon.deployment.synchronizer.git.util.FileUtilities; import 
org.wso2.carbon.deployment.synchronizer.git.util.GitUtilities; import org.wso2.carbon.deployment.synchronizer.DeploymentSynchronizerConstants; import org.wso2.carbon.deployment.synchronizer.util.RepositoryConfigParameter; import org.wso2.carbon.utils.multitenancy.MultitenantUtils; import java.io.File; import java.io.IOException; import java.util.List; import java.util.Set; /** * Git based artifact repository */ public class GitBasedArtifactRepository implements ArtifactRepository { private static final Log log = LogFactory.getLog(GitBasedArtifactRepository.class); private RepositoryManager repositoryManager; private GitDeploymentSyncronizerConfiguration gitDepsyncConfig; private AbstractBehaviour behaviour; /* * Constructor * */ public GitBasedArtifactRepository () { //TODO:fix properly if(!isGitDeploymentSyncEnabled()) { return; } readConfiguration(); //standard worker manager separated deployment if(gitDepsyncConfig.isStandardDeployment()) { //GitBlit git server if(gitDepsyncConfig.getGitServer().equals(GitDeploymentSynchronizerConstants.SERVER_GITBLIT)){ repositoryManager = new DefaultGitRepositoryManager(new GitBlitBasedRepositoryCreator()); } //SCM git server else if (gitDepsyncConfig.getGitServer().equals(GitDeploymentSynchronizerConstants.SERVER_SCM)) { repositoryManager = new DefaultGitRepositoryManager(new SCMBasedRepositoryCreator()); } //No specific Git server specified - use a single repository else if (gitDepsyncConfig.getGitServer().equals(GitDeploymentSynchronizerConstants.SERVER_UNSPECIFIED)) { repositoryManager = new SingleTenantGitRepositoryManager(); } behaviour = new DefaultBehaviour(); } //Stratos 2 specific deployment else { repositoryManager = new S2GitRepositoryManager(); behaviour = new S2Behaviour(); } } private boolean isGitDeploymentSyncEnabled () { //check if deployment synchronization is enabled String depSyncEnabledParam = CarbonUtilities.readConfigurationParameter(GitDeploymentSynchronizerConstants.ENABLED); //Check if deployment 
synchronization is enabled if (depSyncEnabledParam != null && depSyncEnabledParam.equals("true")) { //check if repository type is 'git', else no need to create GitBasedArtifactRepository instance String repoTypeParam = CarbonUtilities.readConfigurationParameter(GitDeploymentSynchronizerConstants.REPOSITORY_TYPE); if (repoTypeParam != null && repoTypeParam.equals(DeploymentSynchronizerConstants.REPOSITORY_TYPE_GIT)) { return true; } } return false; } /** * Reads the configuration */ private void readConfiguration () { gitDepsyncConfig = new GitDeploymentSyncronizerConfiguration(); String standardDeploymentParam = CarbonUtilities.readConfigurationParameter(GitDeploymentSynchronizerConstants.DEPLOYMENT_METHOD); if (standardDeploymentParam != null && (standardDeploymentParam.equalsIgnoreCase("true") || standardDeploymentParam.equalsIgnoreCase("false"))) { gitDepsyncConfig.setStandardDeployment(Boolean.parseBoolean(standardDeploymentParam)); } String gitServerParam = CarbonUtilities.readConfigurationParameter(GitDeploymentSynchronizerConstants.GIT_SERVER); if (gitServerParam != null) { gitDepsyncConfig.setGitServer(gitServerParam); } } /** * Called at tenant load to do initialization related to the tenant * * @param tenantId id of the tenant * @throws DeploymentSynchronizerException in case of an error */ public void init (int tenantId) throws DeploymentSynchronizerException { TenantGitRepositoryContext repoCtx = new TenantGitRepositoryContext(); String gitLocalRepoPath = MultitenantUtils.getAxis2RepositoryPath(tenantId); repoCtx.setTenantId(tenantId); repoCtx.setLocalRepoPath(gitLocalRepoPath); FileRepository localRepo = null; try { localRepo = new FileRepository(new File(gitLocalRepoPath + "/.git")); } catch (IOException e) { handleError("Error creating git local repository for tenant " + tenantId, e); } repoCtx.setLocalRepo(localRepo); repoCtx.setGit(new Git(localRepo)); repoCtx.setCloneExists(false); 
TenantGitRepositoryContextCache.getTenantRepositoryContextCache().cacheTenantGitRepoContext(tenantId, repoCtx); //provision a repository repositoryManager.provisionRepository(tenantId); //repositoryManager.addRepository(tenantId, url); repositoryManager.getUrlInformation(tenantId); repositoryManager.getCredentialsInformation(tenantId); } /** * Commits any changes in the local repository to the relevant remote repository * * @param localRepoPath tenant's local repository path * @return true if commit is successful, else false * @throws DeploymentSynchronizerException in case of an error */ public boolean commit(int tenantId, String localRepoPath) throws DeploymentSynchronizerException { String gitRepoUrl = repositoryManager.getUrlInformation(tenantId).getUrl(); if(gitRepoUrl == null) { //url not available log.warn ("Remote repository URL not available for tenant " + tenantId + ", aborting commit"); return false; } TenantGitRepositoryContext gitRepoCtx = TenantGitRepositoryContextCache.getTenantRepositoryContextCache(). retrieveCachedTenantGitContext(tenantId); Status status = getGitStatus(gitRepoCtx); if (status == null) { return false; } if(status.isClean()) {//no changes, nothing to commit if(log.isDebugEnabled()) log.debug("No changes detected in the local repository at " + localRepoPath); return false; } if(!addArtifacts(gitRepoCtx, getNewArtifacts(status)) && !addArtifacts(gitRepoCtx, getModifiedArtifacts(status)) && !removeArtifacts(gitRepoCtx, getRemovedArtifacts(status))) { //no changes! 
return false; } commitToLocalRepo(gitRepoCtx); pushToRemoteRepo(gitRepoCtx); return behaviour.requireSynchronizeRepositoryRequest(); } /** * Quesries the git status for the repository given by gitRepoCtx * * @param gitRepoCtx TenantGitRepositoryContext instance for the tenant * @return Status instance updated with relevant status information, * null in an error in getting the status */ private Status getGitStatus (TenantGitRepositoryContext gitRepoCtx) { Git git = gitRepoCtx.getGit(); StatusCommand statusCmd = git.status(); Status status; try { status = statusCmd.call(); } catch (GitAPIException e) { log.error("Git status operation for tenant " + gitRepoCtx.getTenantId() + " failed, ", e); status = null; } return status; } /** * Returns the newly added artifact set relevant to the current status of the repository * * @param status git status * @return artifact names set */ private Set<String> getNewArtifacts (Status status) { return status.getUntracked(); } /** * Returns the removed (undeployed) artifact set relevant to the current status of the repository * * @param status git status * @return artifact names set */ private Set<String> getRemovedArtifacts (Status status) { return status.getMissing(); } /** * Return the modified artifacts set relevant to the current status of the repository * * @param status git status * @return artifact names set */ private Set<String> getModifiedArtifacts (Status status) { return status.getModified(); } /** * Adds the artifacts to the local staging area * * @param gitRepoCtx TenantGitRepositoryContext instance * @param artifacts set of artifacts * @return true if artifacts were added */ private boolean addArtifacts (TenantGitRepositoryContext gitRepoCtx, Set<String> artifacts) { if(artifacts.isEmpty()) { return false; } boolean artifactsAdded; AddCommand addCmd = gitRepoCtx.getGit().add(); for (String artifact : artifacts) { addCmd.addFilepattern(artifact); } try { addCmd.call(); artifactsAdded = true; } catch (GitAPIException e) 
{ log.error("Adding artifact to the repository at " + gitRepoCtx.getLocalRepoPath() + "failed", e); artifactsAdded = false; } return artifactsAdded; } /** * Removes the set of artifacts from local repo * * @param gitRepoCtx TenantGitRepositoryContext instance * @param artifacts Set of artifact names to remove * @return true if artifacts were removed */ private boolean removeArtifacts (TenantGitRepositoryContext gitRepoCtx, Set<String> artifacts) { if(artifacts.isEmpty()) { return false; } boolean artifactsRemoved; RmCommand rmCmd = gitRepoCtx.getGit().rm(); for (String artifact : artifacts) { rmCmd.addFilepattern(artifact); } try { rmCmd.call(); artifactsRemoved = true; } catch (GitAPIException e) { log.error("Removing artifact from the repository at " + gitRepoCtx.getLocalRepoPath() + "failed", e); artifactsRemoved = false; } return artifactsRemoved; } /** * Commits changes for a tenant to relevant the local repository * * @param gitRepoCtx TenantGitRepositoryContext instance for the tenant */ private void commitToLocalRepo (TenantGitRepositoryContext gitRepoCtx) { CommitCommand commitCmd = gitRepoCtx.getGit().commit(); commitCmd.setMessage("tenant " + gitRepoCtx.getTenantId() + "'s artifacts committed to repository at " + gitRepoCtx.getLocalRepoPath() + ", time stamp: " + System.currentTimeMillis()); try { commitCmd.call(); } catch (GitAPIException e) { log.error("Committing artifacts to repository failed for tenant " + gitRepoCtx.getTenantId(), e); } } /** * Pushes the artifacts of the tenant to relevant remote repository * * @param gitRepoCtx TenantGitRepositoryContext instance for the tenant */ private void pushToRemoteRepo(TenantGitRepositoryContext gitRepoCtx) { PushCommand pushCmd = gitRepoCtx.getGit().push(); UsernamePasswordCredentialsProvider credentialsProvider = GitUtilities.createCredentialsProvider(repositoryManager, gitRepoCtx.getTenantId()); if (credentialsProvider == null) { log.warn ("Remote repository credentials not available for tenant " + 
gitRepoCtx.getTenantId() + ", aborting push"); return; } pushCmd.setCredentialsProvider(credentialsProvider); try { pushCmd.call(); } catch (GitAPIException e) { log.error("Pushing artifacts to remote repository failed for tenant " + gitRepoCtx.getTenantId(), e); } } /** * Method inherited from ArtifactRepository for initializing checkout * * @param localRepoPath local repository path of the tenant * @return true if success, else false * @throws DeploymentSynchronizerException if an error occurs */ public boolean checkout (int tenantId, String localRepoPath) throws DeploymentSynchronizerException { String gitRepoUrl = repositoryManager.getUrlInformation(tenantId).getUrl(); if(gitRepoUrl == null) { //url not available log.warn ("Remote repository URL not available for tenant " + tenantId + ", aborting checkout"); return false; } TenantGitRepositoryContext gitRepoCtx = TenantGitRepositoryContextCache.getTenantRepositoryContextCache(). retrieveCachedTenantGitContext(tenantId); File gitRepoDir = new File(gitRepoCtx.getLocalRepoPath()); if (!gitRepoDir.exists()) { return cloneRepository(gitRepoCtx); } else { if (GitUtilities.isValidGitRepo(gitRepoCtx.getLocalRepo())) { log.info("Existing git repository detected for tenant " + gitRepoCtx.getTenantId() + ", no clone required"); try { return pullArtifacts(gitRepoCtx); } catch (CheckoutConflictException e) { //conflict(s) detected, try to checkout from local index if(checkoutFromLocalIndex(gitRepoCtx, e.getConflictingPaths())) { try { //now pull the changes from remote repository return pullArtifacts(gitRepoCtx); } catch (CheckoutConflictException e1) { //cannot happen here log.error("Git pull for the path " + e1.getConflictingPaths().toString() + " failed due to conflicts", e1); } } return false; } } else { if (behaviour.requireInitialLocalArtifactSync()) { return syncInitialLocalArtifacts(gitRepoCtx); } else { if(log.isDebugEnabled()) { log.debug("Repository for tenant " + gitRepoCtx.getTenantId() + " is not a valid git 
repo, will try to delete"); } FileUtilities.deleteFolderStructure(gitRepoDir); return cloneRepository(gitRepoCtx); } } } /*if(behaviour.requireInitialLocalArtifactSync() && !gitRepoCtx.initialArtifactsSynced()) { return syncInitialLocalArtifacts(gitRepoCtx); } else if(!gitRepoCtx.cloneExists()) { return cloneRepository(gitRepoCtx); } else { return pullArtifacts(gitRepoCtx); }*/ } /** * Sync any local artifact that are initially available with a remote repository * * @param gitRepoCtx TenantGitRepositoryContext instance * @return true if sync is success, else false */ private boolean syncInitialLocalArtifacts(TenantGitRepositoryContext gitRepoCtx) { boolean syncedLocalArtifacts = false; Status status = getGitStatus(gitRepoCtx); if(status != null && !status.isClean()) { //initialize repository GitUtilities.InitGitRepository(new File(gitRepoCtx.getLocalRepoPath())); //add the remote repository (origin) syncedLocalArtifacts = GitUtilities.addRemote(gitRepoCtx.getLocalRepo(), gitRepoCtx.getRemoteRepoUrl()); } return syncedLocalArtifacts; } /** * Clones the remote repository to the local repository path * * @param gitRepoCtx TenantGitRepositoryContext for the tenant * @return true if clone is success, else false */ private boolean cloneRepository (TenantGitRepositoryContext gitRepoCtx) { //should happen only at the beginning boolean cloneSuccess = false; File gitRepoDir = new File(gitRepoCtx.getLocalRepoPath()); /*if (gitRepoDir.exists()) { if(GitUtilities.isValidGitRepo(gitRepoCtx.getLocalRepo())) { //check if a this is a valid git repo log.info("Existing git repository detected for tenant " + gitRepoCtx.getTenantId() + ", no clone required"); gitRepoCtx.setCloneExists(true); return true; } else { if(log.isDebugEnabled()) { log.debug("Repository for tenant " + gitRepoCtx.getTenantId() + " is not a valid git repo, will try to delete"); } FileUtilities.deleteFolderStructure(gitRepoDir); //if not a valid git repo but non-empty, delete it (else the clone will not work) } 
}*/ CloneCommand cloneCmd = Git.cloneRepository(). setURI(gitRepoCtx.getRemoteRepoUrl()). setDirectory(gitRepoDir). setBranch(GitDeploymentSynchronizerConstants.GIT_REFS_HEADS_MASTER); UsernamePasswordCredentialsProvider credentialsProvider = GitUtilities.createCredentialsProvider(repositoryManager, gitRepoCtx.getTenantId()); if (credentialsProvider == null) { log.warn ("Remote repository credentials not available for tenant " + gitRepoCtx.getTenantId() + ", aborting clone"); return false; } cloneCmd.setCredentialsProvider(credentialsProvider); try { cloneCmd.call(); log.info("Git clone operation for tenant " + gitRepoCtx.getTenantId() + " successful"); gitRepoCtx.setCloneExists(true); cloneSuccess = true; } catch (TransportException e) { log.error("Accessing remote git repository failed for tenant " + gitRepoCtx.getTenantId(), e); } catch (GitAPIException e) { log.error("Git clone operation for tenant " + gitRepoCtx.getTenantId() + " failed", e); } return cloneSuccess; } /** * Pulling if any updates are available in the remote git repository. If basic authentication is required, * will call 'RepositoryInformationService' for credentials. 
* * @param gitRepoCtx TenantGitRepositoryContext instance for tenant * @return true if success, else false */ private boolean pullArtifacts (TenantGitRepositoryContext gitRepoCtx) throws CheckoutConflictException { PullCommand pullCmd = gitRepoCtx.getGit().pull(); UsernamePasswordCredentialsProvider credentialsProvider = GitUtilities.createCredentialsProvider(repositoryManager, gitRepoCtx.getTenantId()); if (credentialsProvider == null) { log.warn ("Remote repository credentials not available for tenant " + gitRepoCtx.getTenantId() + ", aborting pull"); return false; } pullCmd.setCredentialsProvider(credentialsProvider); try { pullCmd.call(); } catch (InvalidConfigurationException e) { log.warn("Git pull unsuccessful for tenant " + gitRepoCtx.getTenantId() + ", " + e.getMessage()); FileUtilities.deleteFolderStructure(new File(gitRepoCtx.getLocalRepoPath())); cloneRepository(gitRepoCtx); return true; } catch (JGitInternalException e) { log.warn("Git pull unsuccessful for tenant " + gitRepoCtx.getTenantId() + ", " + e.getMessage()); return false; } catch (TransportException e) { log.error("Accessing remote git repository " + gitRepoCtx.getRemoteRepoUrl() + " failed for tenant " + gitRepoCtx.getTenantId(), e); return false; } catch (CheckoutConflictException e) { log.warn("Git pull for the path " + e.getConflictingPaths().toString() + " failed due to conflicts"); //FileUtilities.deleteFolderStructure(new File(gitRepoCtx.getLocalRepoPath())); //cloneRepository(gitRepoCtx); throw e; } catch (GitAPIException e) { log.error("Git pull operation for tenant " + gitRepoCtx.getTenantId() + " failed", e); return false; } return true; } /** * Checkout the artifacts from the local index. Any local working copy changes will be discarded. 
* * @param gitRepoCtx TenantGitRepositoryContext instance for the current tenant * @param paths List of paths for artifacts with conflicts * @return true if successfully checked out all the files from local index, else false */ private boolean checkoutFromLocalIndex(TenantGitRepositoryContext gitRepoCtx, List<String> paths) { boolean checkoutSuccess = false; if(paths.isEmpty()) { return checkoutSuccess; } CheckoutCommand checkoutCmd = gitRepoCtx.getGit().checkout(); for(String path : paths) { checkoutCmd.addPath(path); } try { checkoutCmd.call(); if(log.isDebugEnabled()) { for(String path : paths) { log.debug("Checked out conflicting file " + path + " from the local index successfully"); } } checkoutSuccess = true; log.info("Checked out the conflicting files from the local index successfully"); } catch (GitAPIException e) { log.error("Checking out artifacts from index failed", e); } return checkoutSuccess; } public void initAutoCheckout(boolean b) throws DeploymentSynchronizerException { //no implementation } public void cleanupAutoCheckout() { //no implementation } /** * Return the repository type * * @return repository type, i.e. git */ public String getRepositoryType() { return DeploymentSynchronizerConstants.REPOSITORY_TYPE_GIT; } public List<RepositoryConfigParameter> getParameters() { return null; } /** * Partial checkout with defined depth. Currently not supported in GIT. * * @param tenantId tenant id * @param filePath local repository path * @param depth depth to checkout (0 - 3) * @return if success true, else false * @throws DeploymentSynchronizerException if an error occurs */ public boolean checkout(int tenantId, String filePath, int depth) throws DeploymentSynchronizerException { return checkout(tenantId, filePath); //normal checkout is done } /** * Partial update with defined depth.Currently not supported in GIT. 
* * @param tenantId tenant Id * @param rootPath root path to the local repository * @param filePath path to sub directory to update * @param depth depth to update (0 - 3) * @return if success true, else false * @throws DeploymentSynchronizerException if an error occurs */ public boolean update(int tenantId, String rootPath, String filePath, int depth) throws DeploymentSynchronizerException { return checkout(tenantId, rootPath); //normal checkout is done } /** * removed tenant's information from the cache * * @param tenantId tenant Id */ public void cleanupTenantContext(int tenantId) { TenantGitRepositoryContextCache.getTenantRepositoryContextCache(). removeCachedTenantGitContext(tenantId); } private void handleError (String errorMsg, Exception e) throws DeploymentSynchronizerException { log.error(errorMsg, e); throw new DeploymentSynchronizerException(errorMsg, e); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.resourcemanager;

import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.clusterframework.types.ResourceID;
import org.apache.flink.runtime.clusterframework.types.ResourceProfile;
import org.apache.flink.runtime.heartbeat.HeartbeatServices;
import org.apache.flink.runtime.highavailability.TestingHighAvailabilityServices;
import org.apache.flink.runtime.instance.HardwareDescription;
import org.apache.flink.runtime.io.network.partition.ResourceManagerPartitionTrackerImpl;
import org.apache.flink.runtime.io.network.partition.ResultPartitionID;
import org.apache.flink.runtime.jobgraph.IntermediateDataSetID;
import org.apache.flink.runtime.leaderelection.TestingLeaderElectionService;
import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups;
import org.apache.flink.runtime.registration.RegistrationResponse;
import org.apache.flink.runtime.resourcemanager.slotmanager.SlotManager;
import org.apache.flink.runtime.resourcemanager.slotmanager.SlotManagerBuilder;
import org.apache.flink.runtime.rpc.RpcUtils;
import org.apache.flink.runtime.rpc.TestingRpcService;
import org.apache.flink.runtime.taskexecutor.SlotReport;
import org.apache.flink.runtime.taskexecutor.TaskExecutorGateway;
import org.apache.flink.runtime.taskexecutor.TaskExecutorHeartbeatPayload;
import org.apache.flink.runtime.taskexecutor.TestingTaskExecutorGatewayBuilder;
import org.apache.flink.runtime.taskexecutor.partition.ClusterPartitionReport;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.util.TestingFatalErrorHandler;
import org.apache.flink.util.TestLogger;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertThat;

/**
 * Tests for the partition-lifecycle logic in the {@link ResourceManager}.
 *
 * Each test registers two task executors with a freshly started resource manager,
 * reports cluster partitions via heartbeats, and asserts that the expected data
 * sets are released when partitions are lost or a task executor disconnects.
 */
public class ResourceManagerPartitionLifecycleTest extends TestLogger {

	// Upper bound for all blocking waits in these tests (RPC teardown, future gets).
	private static final Time TIMEOUT = Time.minutes(2L);

	// Shared across all tests in the class; created in setupClass, torn down in tearDownClass.
	private static TestingRpcService rpcService;

	// Per-test fixtures, recreated in setup().
	private TestingHighAvailabilityServices highAvailabilityServices;

	private TestingLeaderElectionService resourceManagerLeaderElectionService;

	private TestingFatalErrorHandler testingFatalErrorHandler;

	// The resource manager under test; kept so after() can terminate its RPC endpoint.
	private TestingResourceManager resourceManager;

	@BeforeClass
	public static void setupClass() {
		rpcService = new TestingRpcService();
	}

	@Before
	public void setup() throws Exception {
		highAvailabilityServices = new TestingHighAvailabilityServices();
		resourceManagerLeaderElectionService = new TestingLeaderElectionService();
		highAvailabilityServices.setResourceManagerLeaderElectionService(resourceManagerLeaderElectionService);
		testingFatalErrorHandler = new TestingFatalErrorHandler();
	}

	@After
	public void after() throws Exception {
		if (resourceManager != null) {
			RpcUtils.terminateRpcEndpoint(resourceManager, TIMEOUT);
		}

		if (highAvailabilityServices != null) {
			highAvailabilityServices.closeAndCleanupAllData();
		}

		// surface any fatal error raised asynchronously during the test
		if (testingFatalErrorHandler.hasExceptionOccurred()) {
			testingFatalErrorHandler.rethrowError();
		}
	}

	@AfterClass
	public static void tearDownClass() throws Exception {
		if (rpcService != null) {
			RpcUtils.terminateRpcServices(TIMEOUT, rpcService);
		}
	}

	/**
	 * A heartbeat reporting fewer partitions than the previous one for the same data set
	 * signals partition loss; the resource manager should release the whole data set.
	 */
	@Test
	public void testClusterPartitionReportHandling() throws Exception {
		final CompletableFuture<Collection<IntermediateDataSetID>> clusterPartitionReleaseFuture = new CompletableFuture<>();
		runTest(
			builder -> builder.setReleaseClusterPartitionsConsumer(clusterPartitionReleaseFuture::complete),
			(resourceManagerGateway, taskManagerId1, ignored) -> {
				IntermediateDataSetID dataSetID = new IntermediateDataSetID();
				ResultPartitionID resultPartitionID = new ResultPartitionID();

				// first heartbeat: 2 of 2 partitions present
				resourceManagerGateway.heartbeatFromTaskManager(
					taskManagerId1,
					createTaskExecutorHeartbeatPayload(dataSetID, 2, resultPartitionID, new ResultPartitionID()));

				// send a heartbeat containing 1 partition less -> partition loss -> should result in partition release
				resourceManagerGateway.heartbeatFromTaskManager(
					taskManagerId1,
					createTaskExecutorHeartbeatPayload(dataSetID, 2, resultPartitionID));

				Collection<IntermediateDataSetID> intermediateDataSetIDS = clusterPartitionReleaseFuture.get(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS);
				assertThat(intermediateDataSetIDS, contains(dataSetID));
			});
	}

	/**
	 * Disconnecting a task executor that hosted partitions of a data set should
	 * trigger release of that data set on the remaining task executor(s).
	 */
	@Test
	public void testTaskExecutorShutdownHandling() throws Exception {
		final CompletableFuture<Collection<IntermediateDataSetID>> clusterPartitionReleaseFuture = new CompletableFuture<>();
		runTest(
			builder -> builder.setReleaseClusterPartitionsConsumer(clusterPartitionReleaseFuture::complete),
			(resourceManagerGateway, taskManagerId1, taskManagerId2) -> {
				IntermediateDataSetID dataSetID = new IntermediateDataSetID();

				resourceManagerGateway.heartbeatFromTaskManager(
					taskManagerId1,
					createTaskExecutorHeartbeatPayload(dataSetID, 2, new ResultPartitionID()));
				// we need a partition on another task executor so that there's something to release when one task executor goes down
				resourceManagerGateway.heartbeatFromTaskManager(
					taskManagerId2,
					createTaskExecutorHeartbeatPayload(dataSetID, 2, new ResultPartitionID()));

				resourceManagerGateway.disconnectTaskManager(taskManagerId2, new RuntimeException("test exception"));

				Collection<IntermediateDataSetID> intermediateDataSetIDS = clusterPartitionReleaseFuture.get(TIMEOUT.toMilliseconds(), TimeUnit.MILLISECONDS);
				assertThat(intermediateDataSetIDS, contains(dataSetID));
			});
	}

	/**
	 * Shared test harness: starts a resource manager, registers two task executors
	 * (the first one customized by {@code taskExecutorBuilderSetup}), then hands
	 * the gateway and both task manager IDs to {@code testAction}.
	 */
	private void runTest(TaskExecutorSetup taskExecutorBuilderSetup, TestAction testAction) throws Exception {
		final ResourceManagerGateway resourceManagerGateway = createAndStartResourceManager();

		TestingTaskExecutorGatewayBuilder testingTaskExecutorGateway1Builder = new TestingTaskExecutorGatewayBuilder();
		taskExecutorBuilderSetup.accept(testingTaskExecutorGateway1Builder);
		final TaskExecutorGateway taskExecutorGateway1 = testingTaskExecutorGateway1Builder
			.setAddress(UUID.randomUUID().toString())
			.createTestingTaskExecutorGateway();
		rpcService.registerGateway(taskExecutorGateway1.getAddress(), taskExecutorGateway1);

		final TaskExecutorGateway taskExecutorGateway2 = new TestingTaskExecutorGatewayBuilder()
			.setAddress(UUID.randomUUID().toString())
			.createTestingTaskExecutorGateway();
		rpcService.registerGateway(taskExecutorGateway2.getAddress(), taskExecutorGateway2);

		final ResourceID taskManagerId1 = ResourceID.generate();
		final ResourceID taskManagerId2 = ResourceID.generate();
		registerTaskExecutor(resourceManagerGateway, taskManagerId1, taskExecutorGateway1.getAddress());
		registerTaskExecutor(resourceManagerGateway, taskManagerId2, taskExecutorGateway2.getAddress());

		testAction.accept(resourceManagerGateway, taskManagerId1, taskManagerId2);
	}

	/**
	 * Registers a task executor with the resource manager and asserts the
	 * registration succeeded. Hardware/profile values are arbitrary test fixtures.
	 */
	private void registerTaskExecutor(ResourceManagerGateway resourceManagerGateway, ResourceID taskExecutorId, String taskExecutorAddress) throws Exception {
		final TaskExecutorRegistration taskExecutorRegistration = new TaskExecutorRegistration(
			taskExecutorAddress,
			taskExecutorId,
			1234,
			new HardwareDescription(42, 1337L, 1337L, 0L),
			ResourceProfile.ZERO,
			ResourceProfile.ZERO);
		final CompletableFuture<RegistrationResponse> registrationFuture = resourceManagerGateway.registerTaskExecutor(
			taskExecutorRegistration,
			TestingUtils.TIMEOUT());

		assertThat(registrationFuture.get(), instanceOf(RegistrationResponse.Success.class));
	}

	/**
	 * Builds, starts, and grants leadership to a TestingResourceManager, returning
	 * its gateway. Also stores the instance in {@link #resourceManager} for teardown.
	 */
	private ResourceManagerGateway createAndStartResourceManager() throws Exception {
		final SlotManager slotManager = SlotManagerBuilder.newBuilder()
			.setScheduledExecutor(rpcService.getScheduledExecutor())
			.build();
		final JobLeaderIdService jobLeaderIdService = new JobLeaderIdService(
			highAvailabilityServices,
			rpcService.getScheduledExecutor(),
			TestingUtils.infiniteTime());

		final TestingResourceManager resourceManager = new TestingResourceManager(
			rpcService,
			ResourceManager.RESOURCE_MANAGER_NAME + UUID.randomUUID(),
			ResourceID.generate(),
			highAvailabilityServices,
			new HeartbeatServices(100000L, 1000000L),
			slotManager,
			ResourceManagerPartitionTrackerImpl::new,
			jobLeaderIdService,
			testingFatalErrorHandler,
			UnregisteredMetricGroups.createUnregisteredResourceManagerMetricGroup());

		resourceManager.start();

		// first make the ResourceManager the leader
		resourceManagerLeaderElectionService.isLeader(ResourceManagerId.generate().toUUID()).get();

		this.resourceManager = resourceManager;

		return resourceManager.getSelfGateway(ResourceManagerGateway.class);
	}

	/**
	 * Creates a heartbeat payload reporting the given partitions for a single
	 * data set with {@code numTotalPartitions} expected partitions in total.
	 */
	private static TaskExecutorHeartbeatPayload createTaskExecutorHeartbeatPayload(IntermediateDataSetID dataSetId, int numTotalPartitions, ResultPartitionID... partitionIds) {
		return new TaskExecutorHeartbeatPayload(
			new SlotReport(),
			new ClusterPartitionReport(Collections.singletonList(
				new ClusterPartitionReport.ClusterPartitionReportEntry(dataSetId, new HashSet<>(Arrays.asList(partitionIds)), numTotalPartitions)
			)));
	}

	/** Hook for customizing the first task executor's testing gateway builder. */
	@FunctionalInterface
	private interface TaskExecutorSetup {
		void accept(TestingTaskExecutorGatewayBuilder taskExecutorGatewayBuilder) throws Exception;
	}

	/** The body of a test, given the RM gateway and both registered task manager IDs. */
	@FunctionalInterface
	private interface TestAction {
		void accept(ResourceManagerGateway resourceManagerGateway, ResourceID taskExecutorId1, ResourceID taskExecutorId2) throws Exception;
	}
}
package uk.ac.imperial.doc.gpa.testing.quantitative; import java.text.DecimalFormat; import java.util.LinkedList; import java.util.List; import org.jfree.data.xy.XYSeriesCollection; import com.google.common.collect.Lists; import uk.ac.imperial.doc.jexpressions.constants.Constants; import uk.ac.imperial.doc.jexpressions.expressions.AbstractExpression; import uk.ac.imperial.doc.pctmc.analysis.AnalysisUtils; import uk.ac.imperial.doc.pctmc.analysis.plotexpressions.PlotDescription; import uk.ac.imperial.doc.pctmc.charts.PCTMCChartUtilities; import uk.ac.imperial.doc.pctmc.experiments.iterate.RangeSpecification; import uk.ac.imperial.doc.pctmc.postprocessors.numerical.NumericalPostprocessor; import uk.ac.imperial.doc.pctmc.postprocessors.numerical.NumericalPostprocessorCI; import uk.ac.imperial.doc.pctmc.postprocessors.numerical.ODEAnalysisNumericalPostprocessor; import uk.ac.imperial.doc.pctmc.simulation.PCTMCSimulation; import uk.ac.imperial.doc.pctmc.utils.FileUtils; import uk.ac.imperial.doc.pctmc.utils.PCTMCLogging; import uk.ac.imperial.doc.pctmc.utils.PCTMCOptions; public class ClosureComparison extends RangeRunner { // The evaluated model and used constants protected Constants constants; // Analyses to use for evaluation and expressions // for comparison protected List<ODEAnalysisNumericalPostprocessor> postprocessors; protected List<PlotDescription> plots; protected List<AbstractExpression> expressions; protected List<Integer> newPlotIndices; // Simulation protected PCTMCSimulation simulation; protected NumericalPostprocessor simPostprocessor; protected ErrorEvaluator errorEvaluator; // Accumulated Results protected double[][] maxAverage; // postprocessor x expression protected double[][] averageAverage; protected int totalIterations; // Transient results protected double[][][] maxT; // postprocessor x expression x t protected double[][][] avgT; // postprocessor x expression x t protected String outputFolder; public ClosureComparison( 
List<ODEAnalysisNumericalPostprocessor> postprocessors, NumericalPostprocessor simPostprocessor, List<PlotDescription> plots, Constants constants, List<RangeSpecification> ranges, String outputFolder, int nParts, boolean toplevel) { super(ranges, toplevel); this.postprocessors = postprocessors; this.simPostprocessor = simPostprocessor; this.plots = plots; this.expressions = new LinkedList<AbstractExpression>(); this.outputFolder = outputFolder; newPlotIndices = new LinkedList<Integer>(); for (PlotDescription pd : plots) { newPlotIndices.add(expressions.size()); expressions.addAll(pd.getExpressions()); } newPlotIndices.add(expressions.size()); this.constants = constants; prepareEvaluators(); this.parts = split(nParts); maxAverage = new double[postprocessors.size()][expressions.size()]; averageAverage = new double[postprocessors.size()][expressions.size()]; maxT = new double[postprocessors.size()] [(int)Math.ceil(simPostprocessor.getStopTime()/simPostprocessor.getStepSize())] [expressions.size()] ; avgT = new double[postprocessors.size()] [(int)Math.ceil(simPostprocessor.getStopTime()/simPostprocessor.getStepSize())] [expressions.size()] ; totalIterations = 0; } public ClosureComparison( List<ODEAnalysisNumericalPostprocessor> postprocessors, NumericalPostprocessor simPostprocessor, List<PlotDescription> plots, Constants constants, List<RangeSpecification> ranges, String outputFolder) { this(postprocessors, simPostprocessor, plots, constants, ranges, outputFolder, PCTMCOptions.nthreads, true); } @Override protected RangeRunner createSlave(List<RangeSpecification> ranges, int nParts) { List<ODEAnalysisNumericalPostprocessor> newPostprocessors = new LinkedList<ODEAnalysisNumericalPostprocessor>(); for (ODEAnalysisNumericalPostprocessor p : postprocessors) { newPostprocessors.add((ODEAnalysisNumericalPostprocessor) p .getNewPreparedPostprocessor(constants)); } NumericalPostprocessor newSimPostprocessor = simPostprocessor .getNewPreparedPostprocessor(constants); return 
new ClosureComparison(newPostprocessors, newSimPostprocessor, plots, constants, ranges, outputFolder, nParts, false); } @Override protected void join(Constants constants) { System.out.println("Joining data"); for (RangeRunner r : parts) { ClosureComparison part = (ClosureComparison) r; totalIterations += part.getTotalIterations(); for (int i = 0; i < postprocessors.size(); i++) { for (int j = 0; j < expressions.size(); j++) { maxAverage[i][j] = Math.max(maxAverage[i][j], part.getMaxAverage()[i][j]); averageAverage[i][j] += part.getAverageAverage()[i][j]; for (int t = 0; t < part.getMaxT()[0][0].length; t++) { maxT[i][t][j] = Math.max(maxT[i][t][j], part.getMaxT()[i][t][j]); avgT[i][t][j] += part.getAvgT()[i][t][j]; } } } } } @Override protected void runSingle(Constants constants) { runAnalyses(constants); } @Override protected void processData(Constants constants) { System.out.println("Final summary:"); DecimalFormat df = new DecimalFormat("#.##"); for (int i = 0; i < postprocessors.size(); i++) { if (outputFolder != null) { FileUtils.createNeededDirectories(outputFolder+"/" + i + "/tmp"); } int k = -1; double[][] maxAggregateT = new double[maxT[0].length][plots.size()]; double[][] avgAggregateT = new double[maxT[0].length][plots.size()]; System.out.println("Analysis " + i); for (int j = 0; j < expressions.size(); j++) { if (k + 1 < newPlotIndices.size() && j == newPlotIndices.get(k+1)) { k++; } averageAverage[i][j] /= totalIterations; System.out .println(j + "\t max: " + df.format(maxAverage[i][j] * 100.0) + "\t average: " + df .format(averageAverage[i][j] * 100.0) ); for (int t = 0; t < maxT[0].length; t++) { maxAggregateT[t][k] = Math.max(maxAggregateT[t][k], maxT[i][t][j]); avgAggregateT[t][k] += avgT[i][t][j]; avgT[i][t][j] /= totalIterations; } } for (int t = 0; t < maxT[0].length; t++) { for (int l = 0; l < plots.size(); l++) { avgAggregateT[t][l] /= plots.get(l).getExpressions().size() * totalIterations; } } for (int l = 0; l < plots.size(); l++) { 
double[][] dataMax = new double[maxT[0].length][plots.get(l).getExpressions().size()]; double[][] dataAvg = new double[maxT[0].length][plots.get(l).getExpressions().size()]; Integer start = newPlotIndices.get(l); for (int j = start; j < newPlotIndices.get(l+1); j++) { for (int t = 0; t < dataMax.length; t++) { dataMax[t][j - start] = maxT[i][t][j]; dataAvg[t][j - start] = avgT[i][t][j]; } } String[] names = new String[plots.get(l).getExpressions().size()]; int eI = 0; for (AbstractExpression e : plots.get(l).getExpressions()) { names[eI++] = e.toString(); } XYSeriesCollection datasetMax = AnalysisUtils.getDatasetFromArray(dataMax, simPostprocessor.getStepSize(), names); PCTMCChartUtilities.drawChart(datasetMax, "time", "count", "Max", i + ""); XYSeriesCollection datasetAvg = AnalysisUtils.getDatasetFromArray(dataAvg, simPostprocessor.getStepSize(), names); PCTMCChartUtilities.drawChart(datasetAvg, "time", "count", "Avg", i + ""); if (outputFolder != null) { FileUtils.writeCSVfile(outputFolder + "/" + i + "/max" + l, datasetMax); FileUtils.writeGnuplotFile(outputFolder + "/" + i + "/max" + l, "", Lists.newArrayList(names), "time", "count"); FileUtils.writeCSVfile(outputFolder + "/" + i + "/avg" + l, datasetAvg); FileUtils.writeGnuplotFile(outputFolder + "/" + i + "/avg" + l, "", Lists.newArrayList(names), "time", "count"); } } for (int l = 0; l < plots.size(); l++) { double[][] data = new double[maxAggregateT.length][2]; for (int t = 0; t < data.length; t++) { data[t][0] = maxAggregateT[t][l]; data[t][1] = avgAggregateT[t][l]; } String[] names = new String[]{plots.get(l).toString() + " max max", plots.get(l).toString() + " avg avg"}; XYSeriesCollection datasetAggregate = AnalysisUtils.getDatasetFromArray(data, simPostprocessor.getStepSize(), names); PCTMCChartUtilities.drawChart(datasetAggregate, "time", "count", "Aggregate", i + ""); if (outputFolder != null) { FileUtils.writeCSVfile(outputFolder + "/" + i + "/aggregate" + l, datasetAggregate); 
FileUtils.writeGnuplotFile(outputFolder + "/" + i + "/aggregate" + l, "", Lists.newArrayList(names), "time", "count"); } } } } protected void prepareEvaluators() { System.out.println("Preparing evaluators"); errorEvaluator = new ErrorEvaluator(postprocessors, simPostprocessor, expressions, constants); } public void runAnalyses(Constants constants) { System.out.println("Running analyses"); totalIterations++; errorEvaluator.calculateErrors(constants); ErrorSummary[][] errors = errorEvaluator.getAccumulatedErrors(); double[][][] transientErrors = errorEvaluator.getTransientErrors(); for (int i = 0; i < errors.length; i++) { for (int j = 0; j < errors[0].length; j++) { maxAverage[i][j] = Math.max(maxAverage[i][j], errors[i][j].getAverageRelative()); averageAverage[i][j] += errors[i][j].getAverageRelative(); for (int t = 0; t < transientErrors[0].length; t++) { maxT[i][t][j] = Math.max(maxT[i][t][j], transientErrors[i][t][j]); avgT[i][t][j] += transientErrors[i][t][j]; } } } for (PlotDescription pd : plots) { if (simPostprocessor instanceof NumericalPostprocessorCI) { ((NumericalPostprocessorCI)simPostprocessor).plotData("Sim", constants, ((NumericalPostprocessorCI) simPostprocessor).getResultsCI().get(pd), pd.getExpressions(), null); } else { simPostprocessor.plotData("Sim", constants, pd.getExpressions(), null); } } for (int i = 0; i < postprocessors.size(); i++) { for (PlotDescription pd : plots) { postprocessors.get(i).plotData(i+"", constants, pd.getExpressions(), null); } } System.out.println(ErrorEvaluator.printSummary(errors)); System.out.println("Finished analyses"); } public double[][] getMaxAverage() { return maxAverage; } public double[][] getAverageAverage() { return averageAverage; } public double[][][] getMaxT() { return maxT; } public double[][][] getAvgT() { return avgT; } public int getTotalIterations() { return totalIterations; } }
//
//  ========================================================================
//  Copyright (c) 1995-2013 Mort Bay Consulting Pty. Ltd.
//  ------------------------------------------------------------------------
//  All rights reserved. This program and the accompanying materials
//  are made available under the terms of the Eclipse Public License v1.0
//  and Apache License v2.0 which accompanies this distribution.
//
//      The Eclipse Public License is available at
//      http://www.eclipse.org/legal/epl-v10.html
//
//      The Apache License v2.0 is available at
//      http://www.opensource.org/licenses/apache2.0.php
//
//  You may elect to redistribute this code under either of these licenses.
//  ========================================================================
//

package org.eclipse.jetty.server;

import java.io.IOException;

import org.eclipse.jetty.http.HttpException;
import org.eclipse.jetty.http.HttpStatus;
import org.eclipse.jetty.io.AsyncEndPoint;
import org.eclipse.jetty.io.Connection;
import org.eclipse.jetty.io.EndPoint;
import org.eclipse.jetty.io.nio.AsyncConnection;
import org.eclipse.jetty.io.nio.SelectChannelEndPoint;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;

/* ------------------------------------------------------------ */
/** Asynchronous Server HTTP connection.
 * <p>
 * Drives the HTTP parser/generator state machine over a non-blocking
 * {@link AsyncEndPoint}. {@link #handle()} loops while progress is being made
 * and the connection instance is unchanged; it also maintains a "no progress"
 * counter as a safety net against spinning dispatches.
 */
public class AsyncHttpConnection extends AbstractHttpConnection implements AsyncConnection
{
    // Thresholds for the spin safety net: log every NO_PROGRESS_INFO
    // zero-progress dispatches, force-close the channel at NO_PROGRESS_CLOSE.
    private final static int NO_PROGRESS_INFO = Integer.getInteger("org.mortbay.jetty.NO_PROGRESS_INFO",100);
    private final static int NO_PROGRESS_CLOSE = Integer.getInteger("org.mortbay.jetty.NO_PROGRESS_CLOSE",200);
    private static final Logger LOG = Log.getLogger(AsyncHttpConnection.class);

    // Consecutive handle() calls that made no progress (reset on any progress)
    private int _total_no_progress;
    private final AsyncEndPoint _asyncEndp;
    // False while a response is still being generated after parsing completed;
    // suppresses read interest until reset() (see isSuspended()).
    private boolean _readInterested = true;

    public AsyncHttpConnection(Connector connector, EndPoint endpoint, Server server)
    {
        super(connector,endpoint,server);
        _asyncEndp=(AsyncEndPoint)endpoint;
    }

    /**
     * Handles IO readiness for this connection: parses available input,
     * flushes pending output and dispatches resumed async requests, looping
     * until no further progress can be made.
     *
     * @return this connection, or a switched connection instance after a
     *         101 Switching Protocols response
     * @throws IOException if endpoint IO fails
     */
    @Override
    public Connection handle() throws IOException
    {
        Connection connection = this;
        boolean some_progress=false;
        boolean progress=true;

        try
        {
            setCurrentConnection(this);

            // don't check for idle while dispatched (unless blocking IO is done).
            _asyncEndp.setCheckForIdle(false);

            // While progress and the connection has not changed
            while (progress && connection==this)
            {
                progress=false;
                try
                {
                    // Handle resumed request
                    if (_request._async.isAsync())
                    {
                        if (_request._async.isDispatchable())
                            handleRequest();
                    }
                    // else Parse more input
                    else if (!_parser.isComplete() && _parser.parseAvailable())
                        progress=true;

                    // Generate more output
                    if (_generator.isCommitted() && !_generator.isComplete() && !_endp.isOutputShutdown() && !_request.getAsyncContinuation().isAsyncStarted())
                        if (_generator.flushBuffer()>0)
                            progress=true;

                    // Flush output
                    _endp.flush();

                    // Has any IO been done by the endpoint itself since last loop
                    if (_asyncEndp.hasProgressed())
                        progress=true;
                }
                catch (HttpException e)
                {
                    if (LOG.isDebugEnabled())
                    {
                        LOG.debug("uri="+_uri);
                        LOG.debug("fields="+_requestFields);
                        LOG.debug(e);
                    }
                    // Treat a bad request as progress: an error response is sent
                    progress=true;
                    _generator.sendError(e.getStatus(), e.getReason(), null, true);
                }
                finally
                {
                    some_progress|=progress;
                    //  Is this request/response round complete and are fully flushed?
                    boolean parserComplete = _parser.isComplete();
                    boolean generatorComplete = _generator.isComplete();
                    boolean complete = parserComplete && generatorComplete;
                    if (parserComplete)
                    {
                        if (generatorComplete)
                        {
                            // Reset the parser/generator
                            progress=true;

                            // look for a switched connection instance?
                            if (_response.getStatus()==HttpStatus.SWITCHING_PROTOCOLS_101)
                            {
                                Connection switched=(Connection)_request.getAttribute("org.eclipse.jetty.io.Connection");
                                if (switched!=null)
                                    connection=switched;
                            }

                            reset();

                            // TODO Is this still required?
                            if (!_generator.isPersistent() && !_endp.isOutputShutdown())
                            {
                                LOG.warn("Safety net oshut!!! IF YOU SEE THIS, PLEASE RAISE BUGZILLA");
                                _endp.shutdownOutput();
                            }
                        }
                        else
                        {
                            // We have finished parsing, but not generating so
                            // we must not be interested in reading until we
                            // have finished generating and we reset the generator
                            _readInterested = false;
                            LOG.debug("Disabled read interest while writing response {}", _endp);
                        }
                    }

                    if (!complete && _request.getAsyncContinuation().isAsyncStarted())
                    {
                        // The request is suspended, so even though progress has been made,
                        // exit the while loop by setting progress to false
                        LOG.debug("suspended {}",this);
                        progress=false;
                    }
                }
            }
        }
        finally
        {
            setCurrentConnection(null);

            // If we are not suspended
            if (!_request.getAsyncContinuation().isAsyncStarted())
            {
                // return buffers
                _parser.returnBuffers();
                _generator.returnBuffers();

                // reenable idle checking unless request is suspended
                _asyncEndp.setCheckForIdle(true);
            }

            // Safety net to catch spinning
            if (some_progress)
                _total_no_progress=0;
            else
            {
                _total_no_progress++;
                if (NO_PROGRESS_INFO>0 && _total_no_progress%NO_PROGRESS_INFO==0 && (NO_PROGRESS_CLOSE<=0 || _total_no_progress< NO_PROGRESS_CLOSE))
                    LOG.info("EndPoint making no progress: "+_total_no_progress+" "+_endp+" "+this);
                if (NO_PROGRESS_CLOSE>0 && _total_no_progress==NO_PROGRESS_CLOSE)
                {
                    LOG.warn("Closing EndPoint making no progress: "+_total_no_progress+" "+_endp+" "+this);
                    if (_endp instanceof SelectChannelEndPoint)
                        ((SelectChannelEndPoint)_endp).getChannel().close();
                }
            }
        }
        return connection;
    }

    /**
     * Called when the remote peer shuts down input. Closes the endpoint if no
     * response is committed and the request is not suspended; otherwise makes
     * an idle parser seek EOF.
     *
     * @throws IOException if closing the endpoint fails
     */
    public void onInputShutdown() throws IOException
    {
        // If we don't have a committed response and we are not suspended
        if (_generator.isIdle() && !_request.getAsyncContinuation().isSuspended())
        {
            // then no more can happen, so close.
            _endp.close();
        }

        // Make idle parser seek EOF
        if (_parser.isIdle())
            _parser.setPersistent(false);
    }

    /** Resets parser/generator state for the next request and re-enables read interest. */
    @Override
    public void reset()
    {
        _readInterested = true;
        LOG.debug("Enabled read interest {}", _endp);
        super.reset();
    }

    /** Reports suspended while read interest is disabled (response still being written). */
    @Override
    public boolean isSuspended()
    {
        return !_readInterested || super.isSuspended();
    }
}
package tosca.nodes; import java.io.IOException; import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import com.toscaruntime.exception.UnexpectedException; import com.toscaruntime.exception.deployment.configuration.IllegalFunctionException; import com.toscaruntime.exception.deployment.persistence.DeploymentPersistenceException; import com.toscaruntime.sdk.model.AbstractRuntimeType; import com.toscaruntime.sdk.model.DeploymentNode; import com.toscaruntime.sdk.model.DeploymentRelationshipNode; import com.toscaruntime.sdk.model.OperationInputDefinition; import com.toscaruntime.sdk.util.OperationInputUtil; import com.toscaruntime.util.CodeGeneratorUtil; import com.toscaruntime.util.FunctionUtil; import com.toscaruntime.util.JSONUtil; import com.toscaruntime.util.PropertyUtil; public abstract class Root extends AbstractRuntimeType { private int index; private String name; private DeploymentNode node; /** * The parent node is the one the node is attached to or is hosted on */ private Root parent; /** * The direct host node is the one the node is hosted on */ private Root host; private Map<String, Map<String, Object>> capabilitiesProperties; private Set<DeploymentRelationshipNode> preConfiguredRelationshipNodes = Collections.synchronizedSet(new HashSet<>()); private Set<DeploymentRelationshipNode> postConfiguredRelationshipNodes = Collections.synchronizedSet(new HashSet<>()); public Object getCapabilityProperty(String capabilityName, String propertyName) { if (capabilitiesProperties.containsKey(capabilityName)) { return PropertyUtil.getProperty(capabilitiesProperties.get(capabilityName), propertyName); } else { return null; } } public Map<String, Map<String, Object>> getCapabilitiesProperties() { return capabilitiesProperties; } public void setCapabilitiesProperties(Map<String, Map<String, Object>> capabilitiesProperties) { this.capabilitiesProperties = 
capabilitiesProperties; } public void setName(String name) { this.name = name; } public String getName() { return name; } public DeploymentNode getNode() { return node; } public void setNode(DeploymentNode node) { this.node = node; } @Override public void initialLoad() { Map<String, String> rawAttributes = deployment.getDeploymentPersister().syncGetAttributes(getId()); for (Map.Entry<String, String> rawAttributeEntry : rawAttributes.entrySet()) { try { getAttributes().put(rawAttributeEntry.getKey(), JSONUtil.toObject(rawAttributeEntry.getValue())); } catch (IOException e) { throw new DeploymentPersistenceException("Cannot read as json from persistence attribute " + rawAttributeEntry.getKey() + " of node instance " + getId(), e); } } List<String> outputInterfaces = deployment.getDeploymentPersister().syncGetOutputInterfaces(getId()); for (String interfaceName : outputInterfaces) { List<String> operationNames = deployment.getDeploymentPersister().syncGetOutputOperations(getId(), interfaceName); for (String operationName : operationNames) { Map<String, String> outputs = deployment.getDeploymentPersister().syncGetOutputs(getId(), interfaceName, operationName); operationOutputs.put(CodeGeneratorUtil.getGeneratedMethodName(interfaceName, operationName), outputs); } } this.state = deployment.getDeploymentPersister().syncGetInstanceState(getId()); } @Override public void setState(String newState) { if (!newState.equals(this.state)) { deployment.getDeploymentPersister().syncSaveInstanceState(getId(), newState); this.state = newState; } } @Override public void setAttribute(String key, Object newValue) { Object oldValue = getAttributes().get(key); if (newValue == null) { removeAttribute(key); } else if (!newValue.equals(oldValue)) { try { deployment.getDeploymentPersister().syncSaveInstanceAttribute(getId(), key, JSONUtil.toString(newValue)); } catch (Exception e) { throw new DeploymentPersistenceException("Cannot persist attribute " + key + " of node instance " + getId(), 
e); } getAttributes().put(key, newValue); } } @Override public void removeAttribute(String key) { deployment.getDeploymentPersister().syncDeleteInstanceAttribute(getId(), key); getAttributes().remove(key); } @Override public void setOperationOutputs(String interfaceName, String operationName, Map<String, String> outputs) { deployment.getDeploymentPersister().syncSaveInstanceOutputs(getId(), interfaceName, operationName, outputs); operationOutputs.put(CodeGeneratorUtil.getGeneratedMethodName(interfaceName, operationName), outputs); } /** * Id of a node is generated based on its index within its parent and its parent index within its grandparent etc ... * For example: A war hosted on a tomcat which is hosted on a compute will have as id war_1_1_1 or war_2_1_1 (if the compute is scaled with 2 instances) * * @return generated id of the node */ public String getId() { String id = getName(); LinkedList<Integer> indexQueue = new LinkedList<>(); indexQueue.push(index); Root currentParent = getParent(); while (currentParent != null) { indexQueue.push(currentParent.getIndex()); currentParent = currentParent.getParent(); } for (Integer index : indexQueue) { id += "_" + index; } return id; } public int getIndex() { return index; } public void setIndex(int index) { this.index = index; } public Compute getComputableHost() { if (host == null) { if (this instanceof Compute) { return (Compute) this; } else { return null; } } Root currentHost = host; while (currentHost.getHost() != null) { currentHost = currentHost.getHost(); } if (currentHost instanceof Compute) { return (Compute) currentHost; } else { return null; } } public Root getParent() { return parent; } public void setParent(Root parent) { this.parent = parent; } public Root getHost() { return host; } public void setHost(Root host) { this.host = host; } public Set<DeploymentRelationshipNode> getPreConfiguredRelationshipNodes() { return preConfiguredRelationshipNodes; } public Set<DeploymentRelationshipNode> 
getPostConfiguredRelationshipNodes() { return postConfiguredRelationshipNodes; } protected Map<String, String> executeOperation(String operationName, String operationArtifactPath) { Compute host = getComputableHost(); if (host == null) { // This error should be avoided by validating the recipe in compilation phase throw new UnexpectedException("Non hosted node cannot have operation"); } Map<String, OperationInputDefinition> inputDefinitions = operationInputs.get(operationName); Map<String, Object> inputs = OperationInputUtil.evaluateInputDefinitions(inputDefinitions); inputs.put("NODE", getName()); inputs.put("INSTANCE", getId()); inputs.put("INSTANCES", OperationInputUtil.makeInstancesVariable(getNode().getInstances())); inputs.put("HOST", getHost().getName()); for (Root sibling : getNode().getInstances()) { // This will inject also other instances input value Map<String, OperationInputDefinition> siblingInputDefinitions = sibling.getOperationInputs().get(operationName); inputs.putAll(OperationInputUtil.evaluateInputDefinitions(sibling.getId(), siblingInputDefinitions)); } return host.execute(getId(), operationArtifactPath, inputs, getDeploymentArtifacts()); } public void create() { } public void configure() { } public void start() { } public void stop() { } public void delete() { } private String functionToString(String functionName, String... paths) { StringBuilder buffer = new StringBuilder(functionName).append("[ "); for (String path : paths) { buffer.append(path).append(","); } buffer.setLength(buffer.length() - 1); buffer.append("]"); return buffer.toString(); } public Object evaluateFunction(String functionName, String... 
paths) { if (paths.length == 0) { throw new IllegalFunctionException("Function " + functionName + " path is empty"); } if ("get_input".equals(functionName)) { return getInput(paths[0]); } String entity = paths[0]; Object value; switch (entity) { case "HOST": if (getHost() == null) { throw new IllegalFunctionException("Cannot " + functionToString(functionName, paths) + " as this node does not have a direct host"); } return getHost().evaluateFunction(functionName, FunctionUtil.setEntityToSelf(paths)); case "SELF": switch (functionName) { case "get_property": if (paths.length == 2) { value = getProperty(paths[1]); } else if (paths.length == 3) { value = getCapabilityProperty(paths[1], paths[2]); } else { throw new IllegalFunctionException("get_property must be followed by entity and the property name (2 arguments), or entity then requirement/capability name and property name (3 arguments)"); } break; case "get_attribute": value = getAttribute(paths[1]); break; case "get_operation_output": value = getOperationOutput(paths[1], paths[2], paths[3]); break; default: throw new IllegalFunctionException("Function " + functionName + " is not supported on SELF entity"); } break; default: throw new IllegalFunctionException("Entity " + entity + " is not supported"); } if (value == null) { if (getHost() != null) { return getHost().evaluateFunction(functionName, paths); } else { return ""; } } else { return value; } } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Root root = (Root) o; return getId().equals(root.getId()); } @Override public int hashCode() { return getId().hashCode(); } @Override public String toString() { return "NodeInstance{" + "id='" + getId() + '\'' + '}'; } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.synapse.implementation; import com.azure.core.annotation.BodyParam; import com.azure.core.annotation.ExpectedResponses; import com.azure.core.annotation.Get; import com.azure.core.annotation.HeaderParam; import com.azure.core.annotation.Headers; import com.azure.core.annotation.Host; import com.azure.core.annotation.HostParam; import com.azure.core.annotation.PathParam; import com.azure.core.annotation.Put; import com.azure.core.annotation.QueryParam; import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceInterface; import com.azure.core.annotation.ServiceMethod; import com.azure.core.annotation.UnexpectedResponseExceptionType; import com.azure.core.http.rest.PagedFlux; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.http.rest.Response; import com.azure.core.http.rest.RestProxy; import com.azure.core.management.exception.ManagementException; import com.azure.core.util.Context; import com.azure.core.util.FluxUtil; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.synapse.fluent.ExtendedSqlPoolBlobAuditingPoliciesClient; import com.azure.resourcemanager.synapse.fluent.models.ExtendedSqlPoolBlobAuditingPolicyInner; import com.azure.resourcemanager.synapse.models.ExtendedSqlPoolBlobAuditingPolicyListResult; import reactor.core.publisher.Mono; /** * An instance of this class provides access to all the operations defined in ExtendedSqlPoolBlobAuditingPoliciesClient. 
*/ public final class ExtendedSqlPoolBlobAuditingPoliciesClientImpl implements ExtendedSqlPoolBlobAuditingPoliciesClient { private final ClientLogger logger = new ClientLogger(ExtendedSqlPoolBlobAuditingPoliciesClientImpl.class); /** The proxy service used to perform REST calls. */ private final ExtendedSqlPoolBlobAuditingPoliciesService service; /** The service client containing this operation class. */ private final SynapseManagementClientImpl client; /** * Initializes an instance of ExtendedSqlPoolBlobAuditingPoliciesClientImpl. * * @param client the instance of the service client containing this operation class. */ ExtendedSqlPoolBlobAuditingPoliciesClientImpl(SynapseManagementClientImpl client) { this.service = RestProxy .create( ExtendedSqlPoolBlobAuditingPoliciesService.class, client.getHttpPipeline(), client.getSerializerAdapter()); this.client = client; } /** * The interface defining all the services for SynapseManagementClientExtendedSqlPoolBlobAuditingPolicies to be used * by the proxy service to perform REST calls. 
*/ @Host("{$host}") @ServiceInterface(name = "SynapseManagementCli") private interface ExtendedSqlPoolBlobAuditingPoliciesService { @Headers({"Content-Type: application/json"}) @Get( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces" + "/{workspaceName}/sqlPools/{sqlPoolName}/extendedAuditingSettings/{blobAuditingPolicyName}") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<ExtendedSqlPoolBlobAuditingPolicyInner>> get( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("workspaceName") String workspaceName, @PathParam("sqlPoolName") String sqlPoolName, @PathParam("blobAuditingPolicyName") String blobAuditingPolicyName, @HeaderParam("Accept") String accept, Context context); @Headers({"Content-Type: application/json"}) @Put( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces" + "/{workspaceName}/sqlPools/{sqlPoolName}/extendedAuditingSettings/{blobAuditingPolicyName}") @ExpectedResponses({200, 201}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<ExtendedSqlPoolBlobAuditingPolicyInner>> createOrUpdate( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("workspaceName") String workspaceName, @PathParam("sqlPoolName") String sqlPoolName, @PathParam("blobAuditingPolicyName") String blobAuditingPolicyName, @BodyParam("application/json") ExtendedSqlPoolBlobAuditingPolicyInner parameters, @HeaderParam("Accept") String accept, Context context); @Headers({"Content-Type: application/json"}) @Get( 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces" + "/{workspaceName}/sqlPools/{sqlPoolName}/extendedAuditingSettings") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<ExtendedSqlPoolBlobAuditingPolicyListResult>> listBySqlPool( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("workspaceName") String workspaceName, @PathParam("sqlPoolName") String sqlPoolName, @HeaderParam("Accept") String accept, Context context); @Headers({"Content-Type: application/json"}) @Get("{nextLink}") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<ExtendedSqlPoolBlobAuditingPolicyListResult>> listBySqlPoolNext( @PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint, @HeaderParam("Accept") String accept, Context context); } /** * Gets an extended Sql pool's blob auditing policy. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param sqlPoolName SQL pool name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return an extended Sql pool's blob auditing policy along with {@link Response} on successful completion of * {@link Mono}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<ExtendedSqlPoolBlobAuditingPolicyInner>> getWithResponseAsync( String resourceGroupName, String workspaceName, String sqlPoolName) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } if (sqlPoolName == null) { return Mono.error(new IllegalArgumentException("Parameter sqlPoolName is required and cannot be null.")); } final String apiVersion = "2021-06-01"; final String blobAuditingPolicyName = "default"; final String accept = "application/json"; return FluxUtil .withContext( context -> service .get( this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), resourceGroupName, workspaceName, sqlPoolName, blobAuditingPolicyName, accept, context)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Gets an extended Sql pool's blob auditing policy. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param sqlPoolName SQL pool name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
 * @return an extended Sql pool's blob auditing policy along with {@link Response} on successful completion of
 *     {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<ExtendedSqlPoolBlobAuditingPolicyInner>> getWithResponseAsync(
    String resourceGroupName, String workspaceName, String sqlPoolName, Context context) {
    // Same validation sequence as the context-free overload; keep the two in sync.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (workspaceName == null) {
        return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
    }
    if (sqlPoolName == null) {
        return Mono.error(new IllegalArgumentException("Parameter sqlPoolName is required and cannot be null."));
    }
    final String apiVersion = "2021-06-01";
    final String blobAuditingPolicyName = "default";
    final String accept = "application/json";
    // Merge the caller-supplied context with the client defaults, then call the
    // proxy directly — no FluxUtil.withContext wrapper is needed on this path.
    context = this.client.mergeContext(context);
    return service
        .get(
            this.client.getEndpoint(),
            apiVersion,
            this.client.getSubscriptionId(),
            resourceGroupName,
            workspaceName,
            sqlPoolName,
            blobAuditingPolicyName,
            accept,
            context);
}

/**
 * Gets an extended Sql pool's blob auditing policy.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param workspaceName The name of the workspace.
 * @param sqlPoolName SQL pool name.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return an extended Sql pool's blob auditing policy on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<ExtendedSqlPoolBlobAuditingPolicyInner> getAsync( String resourceGroupName, String workspaceName, String sqlPoolName) { return getWithResponseAsync(resourceGroupName, workspaceName, sqlPoolName) .flatMap( (Response<ExtendedSqlPoolBlobAuditingPolicyInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); } /** * Gets an extended Sql pool's blob auditing policy. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param sqlPoolName SQL pool name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return an extended Sql pool's blob auditing policy. */ @ServiceMethod(returns = ReturnType.SINGLE) public ExtendedSqlPoolBlobAuditingPolicyInner get( String resourceGroupName, String workspaceName, String sqlPoolName) { return getAsync(resourceGroupName, workspaceName, sqlPoolName).block(); } /** * Gets an extended Sql pool's blob auditing policy. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param sqlPoolName SQL pool name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return an extended Sql pool's blob auditing policy along with {@link Response}. 
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<ExtendedSqlPoolBlobAuditingPolicyInner> getWithResponse(
    String resourceGroupName, String workspaceName, String sqlPoolName, Context context) {
    // Blocking wrapper over the async overload for synchronous callers.
    return getWithResponseAsync(resourceGroupName, workspaceName, sqlPoolName, context).block();
}

/**
 * Creates or updates an extended Sql pool's blob auditing policy.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param workspaceName The name of the workspace.
 * @param sqlPoolName SQL pool name.
 * @param parameters The extended Sql pool blob auditing policy.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return an extended Sql pool blob auditing policy along with {@link Response} on successful completion of {@link
 *     Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<ExtendedSqlPoolBlobAuditingPolicyInner>> createOrUpdateWithResponseAsync(
    String resourceGroupName,
    String workspaceName,
    String sqlPoolName,
    ExtendedSqlPoolBlobAuditingPolicyInner parameters) {
    // Fail-fast validation; ordering is part of the observable contract.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (workspaceName == null) {
        return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
    }
    if (sqlPoolName == null) {
        return Mono.error(new IllegalArgumentException("Parameter sqlPoolName is required and cannot be null."));
    }
    if (parameters == null) {
        return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
    } else {
        // Delegate deep validation of the request body to the model itself.
        parameters.validate();
    }
    final String apiVersion = "2021-06-01";
    final String blobAuditingPolicyName = "default";
    final String accept = "application/json";
    return FluxUtil
        .withContext(
            context ->
                service
                    .createOrUpdate(
                        this.client.getEndpoint(),
                        apiVersion,
                        this.client.getSubscriptionId(),
                        resourceGroupName,
                        workspaceName,
                        sqlPoolName,
                        blobAuditingPolicyName,
                        parameters,
                        accept,
                        context))
        .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}

/**
 * Creates or updates an extended Sql pool's blob auditing policy.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param workspaceName The name of the workspace.
 * @param sqlPoolName SQL pool name.
 * @param parameters The extended Sql pool blob auditing policy.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return an extended Sql pool blob auditing policy along with {@link Response} on successful completion of {@link
 *     Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<ExtendedSqlPoolBlobAuditingPolicyInner>> createOrUpdateWithResponseAsync(
    String resourceGroupName,
    String workspaceName,
    String sqlPoolName,
    ExtendedSqlPoolBlobAuditingPolicyInner parameters,
    Context context) {
    // Same validation sequence as the context-free overload; keep the two in sync.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (workspaceName == null) {
        return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
    }
    if (sqlPoolName == null) {
        return Mono.error(new IllegalArgumentException("Parameter sqlPoolName is required and cannot be null."));
    }
    if (parameters == null) {
        return Mono.error(new IllegalArgumentException("Parameter parameters is required and cannot be null."));
    } else {
        parameters.validate();
    }
    final String apiVersion = "2021-06-01";
    final String blobAuditingPolicyName = "default";
    final String accept = "application/json";
    // Merge caller context with client defaults before hitting the proxy.
    context = this.client.mergeContext(context);
    return service
        .createOrUpdate(
            this.client.getEndpoint(),
            apiVersion,
            this.client.getSubscriptionId(),
            resourceGroupName,
            workspaceName,
            sqlPoolName,
            blobAuditingPolicyName,
            parameters,
            accept,
            context);
}

/**
 * Creates or updates an extended Sql pool's blob auditing policy.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param workspaceName The name of the workspace.
 * @param sqlPoolName SQL pool name.
 * @param parameters The extended Sql pool blob auditing policy.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return an extended Sql pool blob auditing policy on successful completion of {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<ExtendedSqlPoolBlobAuditingPolicyInner> createOrUpdateAsync( String resourceGroupName, String workspaceName, String sqlPoolName, ExtendedSqlPoolBlobAuditingPolicyInner parameters) { return createOrUpdateWithResponseAsync(resourceGroupName, workspaceName, sqlPoolName, parameters) .flatMap( (Response<ExtendedSqlPoolBlobAuditingPolicyInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); } /** * Creates or updates an extended Sql pool's blob auditing policy. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param sqlPoolName SQL pool name. * @param parameters The extended Sql pool blob auditing policy. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return an extended Sql pool blob auditing policy. */ @ServiceMethod(returns = ReturnType.SINGLE) public ExtendedSqlPoolBlobAuditingPolicyInner createOrUpdate( String resourceGroupName, String workspaceName, String sqlPoolName, ExtendedSqlPoolBlobAuditingPolicyInner parameters) { return createOrUpdateAsync(resourceGroupName, workspaceName, sqlPoolName, parameters).block(); } /** * Creates or updates an extended Sql pool's blob auditing policy. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param sqlPoolName SQL pool name. 
* @param parameters The extended Sql pool blob auditing policy. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return an extended Sql pool blob auditing policy along with {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<ExtendedSqlPoolBlobAuditingPolicyInner> createOrUpdateWithResponse( String resourceGroupName, String workspaceName, String sqlPoolName, ExtendedSqlPoolBlobAuditingPolicyInner parameters, Context context) { return createOrUpdateWithResponseAsync(resourceGroupName, workspaceName, sqlPoolName, parameters, context) .block(); } /** * Lists extended auditing settings of a Sql pool. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param sqlPoolName SQL pool name. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of sql pool extended auditing settings along with {@link PagedResponse} on successful completion * of {@link Mono}. 
 */
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<ExtendedSqlPoolBlobAuditingPolicyInner>> listBySqlPoolSinglePageAsync(
    String resourceGroupName, String workspaceName, String sqlPoolName) {
    // Fail-fast validation; ordering is part of the observable contract.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (workspaceName == null) {
        return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
    }
    if (sqlPoolName == null) {
        return Mono.error(new IllegalArgumentException("Parameter sqlPoolName is required and cannot be null."));
    }
    final String apiVersion = "2021-06-01";
    final String accept = "application/json";
    return FluxUtil
        .withContext(
            context ->
                service
                    .listBySqlPool(
                        this.client.getEndpoint(),
                        apiVersion,
                        this.client.getSubscriptionId(),
                        resourceGroupName,
                        workspaceName,
                        sqlPoolName,
                        accept,
                        context))
        // Adapt the raw list result into a PagedResponse so PagedFlux can drive
        // continuation from the returned nextLink.
        .<PagedResponse<ExtendedSqlPoolBlobAuditingPolicyInner>>map(
            res ->
                new PagedResponseBase<>(
                    res.getRequest(),
                    res.getStatusCode(),
                    res.getHeaders(),
                    res.getValue().value(),
                    res.getValue().nextLink(),
                    null))
        .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}

/**
 * Lists extended auditing settings of a Sql pool.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param workspaceName The name of the workspace.
 * @param sqlPoolName SQL pool name.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return a list of sql pool extended auditing settings along with {@link PagedResponse} on successful completion
 *     of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<ExtendedSqlPoolBlobAuditingPolicyInner>> listBySqlPoolSinglePageAsync(
    String resourceGroupName, String workspaceName, String sqlPoolName, Context context) {
    // Same validation sequence as the context-free overload; keep the two in sync.
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    if (this.client.getSubscriptionId() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getSubscriptionId() is required and cannot be null."));
    }
    if (resourceGroupName == null) {
        return Mono
            .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
    }
    if (workspaceName == null) {
        return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
    }
    if (sqlPoolName == null) {
        return Mono.error(new IllegalArgumentException("Parameter sqlPoolName is required and cannot be null."));
    }
    final String apiVersion = "2021-06-01";
    final String accept = "application/json";
    // Merge caller context with client defaults before hitting the proxy.
    context = this.client.mergeContext(context);
    return service
        .listBySqlPool(
            this.client.getEndpoint(),
            apiVersion,
            this.client.getSubscriptionId(),
            resourceGroupName,
            workspaceName,
            sqlPoolName,
            accept,
            context)
        // Adapt the raw list result into a PagedResponse for PagedFlux paging.
        .map(
            res ->
                new PagedResponseBase<>(
                    res.getRequest(),
                    res.getStatusCode(),
                    res.getHeaders(),
                    res.getValue().value(),
                    res.getValue().nextLink(),
                    null));
}

/**
 * Lists extended auditing settings of a Sql pool.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param workspaceName The name of the workspace.
 * @param sqlPoolName SQL pool name.
* @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of sql pool extended auditing settings. */ @ServiceMethod(returns = ReturnType.COLLECTION) private PagedFlux<ExtendedSqlPoolBlobAuditingPolicyInner> listBySqlPoolAsync( String resourceGroupName, String workspaceName, String sqlPoolName) { return new PagedFlux<>( () -> listBySqlPoolSinglePageAsync(resourceGroupName, workspaceName, sqlPoolName), nextLink -> listBySqlPoolNextSinglePageAsync(nextLink)); } /** * Lists extended auditing settings of a Sql pool. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param sqlPoolName SQL pool name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of sql pool extended auditing settings. */ @ServiceMethod(returns = ReturnType.COLLECTION) private PagedFlux<ExtendedSqlPoolBlobAuditingPolicyInner> listBySqlPoolAsync( String resourceGroupName, String workspaceName, String sqlPoolName, Context context) { return new PagedFlux<>( () -> listBySqlPoolSinglePageAsync(resourceGroupName, workspaceName, sqlPoolName, context), nextLink -> listBySqlPoolNextSinglePageAsync(nextLink, context)); } /** * Lists extended auditing settings of a Sql pool. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param sqlPoolName SQL pool name. * @throws IllegalArgumentException thrown if parameters fail the validation. 
* @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of sql pool extended auditing settings. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<ExtendedSqlPoolBlobAuditingPolicyInner> listBySqlPool( String resourceGroupName, String workspaceName, String sqlPoolName) { return new PagedIterable<>(listBySqlPoolAsync(resourceGroupName, workspaceName, sqlPoolName)); } /** * Lists extended auditing settings of a Sql pool. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param sqlPoolName SQL pool name. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of sql pool extended auditing settings. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<ExtendedSqlPoolBlobAuditingPolicyInner> listBySqlPool( String resourceGroupName, String workspaceName, String sqlPoolName, Context context) { return new PagedIterable<>(listBySqlPoolAsync(resourceGroupName, workspaceName, sqlPoolName, context)); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a list of sql pool extended auditing settings along with {@link PagedResponse} on successful completion * of {@link Mono}. 
 */
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<ExtendedSqlPoolBlobAuditingPolicyInner>> listBySqlPoolNextSinglePageAsync(
    String nextLink) {
    // nextLink is the opaque continuation URL returned by the previous page.
    if (nextLink == null) {
        return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
    }
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    final String accept = "application/json";
    return FluxUtil
        .withContext(context -> service.listBySqlPoolNext(nextLink, this.client.getEndpoint(), accept, context))
        // Adapt the raw list result into a PagedResponse for PagedFlux paging.
        .<PagedResponse<ExtendedSqlPoolBlobAuditingPolicyInner>>map(
            res ->
                new PagedResponseBase<>(
                    res.getRequest(),
                    res.getStatusCode(),
                    res.getHeaders(),
                    res.getValue().value(),
                    res.getValue().nextLink(),
                    null))
        .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}

/**
 * Get the next page of items.
 *
 * @param nextLink The nextLink parameter.
 * @param context The context to associate with this operation.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws ManagementException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return a list of sql pool extended auditing settings along with {@link PagedResponse} on successful completion
 *     of {@link Mono}.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<ExtendedSqlPoolBlobAuditingPolicyInner>> listBySqlPoolNextSinglePageAsync(
    String nextLink, Context context) {
    // Context-aware variant of the continuation-page fetch; same validation order.
    if (nextLink == null) {
        return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
    }
    if (this.client.getEndpoint() == null) {
        return Mono
            .error(
                new IllegalArgumentException(
                    "Parameter this.client.getEndpoint() is required and cannot be null."));
    }
    final String accept = "application/json";
    // Merge caller context with client defaults before hitting the proxy.
    context = this.client.mergeContext(context);
    return service
        .listBySqlPoolNext(nextLink, this.client.getEndpoint(), accept, context)
        .map(
            res ->
                new PagedResponseBase<>(
                    res.getRequest(),
                    res.getStatusCode(),
                    res.getHeaders(),
                    res.getValue().value(),
                    res.getValue().nextLink(),
                    null));
}
}
package jackpal.androidterm;

import android.annotation.SuppressLint;
import android.content.Context;
import android.os.Build;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.RequiresApi;
import android.util.Log;
import android.widget.Toast;

import com.quseit.config.CONF;
import com.quseit.util.FileHelper;
import com.quseit.util.FileUtils;
import com.quseit.util.NAction;
import com.quseit.util.NUtil;

import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;

import jackpal.androidterm.compat.FileCompat;
import jackpal.androidterm.emulatorview.ColorScheme;
import jackpal.androidterm.emulatorview.TermSession;
import jackpal.androidterm.emulatorview.UpdateCallback;
import jackpal.androidterm.util.NStorage;
import jackpal.androidterm.util.TermSettings;

/**
 * A terminal session, consisting of a TerminalEmulator, a TranscriptScreen,
 * the PID of the process attached to the session, and the I/O streams used to
 * talk to the process.
 */
public class ShellTermSession extends TermSession {
    /** Set to true to force into 80 x 24 for testing with vttest. */
    private static final boolean VTTEST_MODE = false;

    private TermSettings mSettings;
    private Context context;
    private static final String TAG = "ShellTermSession";

    // PID of the shell subprocess and the pty master fd used to talk to it
    // (both come from the native Exec.createSubprocess call).
    private int mProcId;
    private FileDescriptor mTermFd;

    private Thread mWatcherThread;

    // A cookie which uniquely identifies this session.
    private String mHandle;

    // Extra PYTHONPATH / script directory entry for the QPython environment.
    private String pyPath = "";
    private String mInitialCommand;
    // Set once the watcher thread observes the subprocess exit.
    private boolean isEnd = false;

    public static final int PROCESS_EXIT_FINISHES_SESSION = 0;
    public static final int PROCESS_EXIT_DISPLAYS_MESSAGE = 1;

    private int mProcessExitBehavior = PROCESS_EXIT_FINISHES_SESSION;
    private String mProcessExitMessage;

    private static final int PROCESS_EXITED = 1;

    // Marshals the subprocess-exit notification from the watcher thread onto
    // the thread this Handler was created on.
    @SuppressLint("HandlerLeak")
    private Handler mMsgHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            if (!isRunning()) {
                return;
            }
            if (msg.what == PROCESS_EXITED) {
                onProcessExit((Integer) msg.obj);
            }
        }
    };

    // Keeps the pty's UTF-8 mode in sync with the emulator's mode.
    private UpdateCallback mUTF8ModeNotify = new UpdateCallback() {
        public void onUpdate() {
            Exec.setPtyUTF8Mode(mTermFd, getUTF8Mode());
        }
    };

    /** Returns true once the shell subprocess has exited. */
    public boolean getEndStat() {
        return this.isEnd;
    }

    /**
     * Builds the session: applies preferences, spawns the shell subprocess and
     * prepares (but does not start) the watcher thread that reaps it.
     *
     * @param context  Android context used to resolve file locations and prefs.
     * @param settings terminal settings (shell, PATH policy, colors, UTF-8).
     * @param cmd      initial command written to the shell once the emulator starts.
     * @param pyPath   extra PYTHONPATH entry for the QPython environment.
     */
    public ShellTermSession(Context context, TermSettings settings, String cmd, String pyPath) {
        super();
        Log.d(TAG, "ShellTermSession(cmd):"+cmd);

        this.context = context;
        this.pyPath = pyPath;
        this.isEnd = false;

        updatePrefs(settings);
        initializeSession(cmd);
        this.mInitialCommand = cmd;

        mWatcherThread = new Thread() {
            @Override
            public void run() {
                Log.i(TermDebug.LOG_TAG, "waiting for: " + mProcId);
                int result = Exec.waitFor(mProcId);
                Log.i(TermDebug.LOG_TAG, "Subprocess exited: " + result);
                mMsgHandler.sendMessage(mMsgHandler.obtainMessage(PROCESS_EXITED, result));
                isEnd = true;
            }
        };
        mWatcherThread.setName("Process watcher");
    }

    /** Kicks off the emulator with the default 80x24 geometry. */
    public void shellRun() {
        initializeEmulator(80, 24);
    }

    /** Applies color scheme and UTF-8 default from the given settings. */
    public void updatePrefs(TermSettings settings) {
        mSettings = settings;
        try {
            setColorScheme(new ColorScheme(settings.getColorScheme()));
            setDefaultUTF8Mode(settings.defaultToUTF8Mode());
        } catch (Exception e) {
            // Best-effort: a malformed color scheme must not prevent the
            // session from starting; keep previous/default appearance.
        }
    }

    /**
     * Creates the shell subprocess: builds PATH per settings, assembles the
     * QPython environment, writes an init.sh exporting it, and wires the pty
     * streams. Must be called exactly once, from the constructor.
     */
    private void initializeSession(String cmd) {
        Log.d(TAG, "initializeSession:"+cmd);
        TermSettings settings = mSettings;
        int[] processId = new int[1];

        String path = System.getenv("PATH");
        if (settings.doPathExtensions()) {
            String appendPath = settings.getAppendPath();
            if (appendPath != null && appendPath.length() > 0) {
                path = path + ":" + appendPath;
            }
            if (settings.allowPathPrepend()) {
                String prependPath = settings.getPrependPath();
                if (prependPath != null && prependPath.length() > 0) {
                    path = prependPath + ":" + path;
                }
            }
        }
        if (settings.verifyPath()) {
            path = checkPath(path);
        }

        String[] env = new String[20];
        File filesDir = this.context.getFilesDir();

        // HACKED FOR QPython: app data lives on external storage.
        File externalStorage = new File(Environment.getExternalStorageDirectory(), CONF.BASE_PATH);
        if (!externalStorage.exists()) {
            externalStorage.mkdir();
        }
        File td = new File(externalStorage+"/cache");
        if (!td.exists()) {
            td.mkdir();
        }

        String py3 = NAction.getQPyInterpreter(this.context);
        String abiLib = NUtil.is64Bit() ? filesDir+"/libs/arm64-v8a/" : filesDir+"/libs/armeabi-v7a/";

        env[0] = "TERM=" + settings.getTermType();
        env[1] = "PATH=" + this.context.getFilesDir()+"/bin"+":"+path;
        env[2] = "LD_LIBRARY_PATH=.:"+filesDir+"/lib/"+":"+filesDir+"/:"+filesDir.getParentFile()+"/lib/:"+abiLib;
        env[3] = "PYTHONHOME="+filesDir;
        env[4] = "ANDROID_PRIVATE="+filesDir;
        env[5] = "PYTHONPATH="
                +filesDir+"/lib/"+py3+"/site-packages/:"
                +filesDir+"/lib/"+py3+"/:"
                +filesDir+"/lib/"+py3.replace(".","")+".zip:"
                +filesDir+"/lib/"+py3+"/qpyutil.zip:"
                +filesDir+"/lib/"+py3+"/lib-dynload/:"
                +externalStorage+"/lib/"+py3+"/site-packages/:"
                +pyPath;
        env[6] = "PYTHONOPTIMIZE=2";
        env[7] = "TMPDIR="+externalStorage+"/cache";
        env[8] = "AP_HOST="+NStorage.getSP(this.context, "sl4a.hostname");
        env[9] = "AP_PORT="+NStorage.getSP(this.context, "sl4a.port");
        // NOTE(review): "sl4a.secue" looks like a typo for "sl4a.secure", but it
        // is a persisted preference key — changing it would break existing data.
        env[10] = "AP_HANDSHAKE="+NStorage.getSP(this.context, "sl4a.secue");
        env[11] = "ANDROID_PUBLIC="+externalStorage;
        env[12] = "ANDROID_ARGUMENT="+pyPath;
        env[13] = "IS_QPY3=1";
        env[14] = "QPY_USERNO="+NAction.getUserNoId(context);
        env[15] = "QPY_ARGUMENT="+NAction.getExtConf(context);
        env[16] = "PYTHONDONTWRITEBYTECODE=1";
        env[17] = "TMP="+externalStorage+"/cache";
        env[18] = "ANDROID_APP_PATH="+externalStorage+"";
        env[19] = "LANG=en_US.UTF-8";

        // Persist the environment as an init.sh so external shells can source it.
        File enf = new File(context.getFilesDir()+"/bin/init.sh");
        String content = "#!/system/bin/sh";
        for (int i = 0; i < env.length; i++) {
            content += "\nexport "+env[i];
        }
        FileHelper.putFileContents(context, enf.getAbsolutePath(), content.trim());
        try {
            FileUtils.chmod(enf, 0755);
        } catch (Exception e) {
            e.printStackTrace();
        }

        createSubprocess(processId, settings.getShell(), env);
        mProcId = processId[0];

        // The pty master fd carries both directions of the conversation.
        setTermOut(new FileOutputStream(mTermFd));
        setTermIn(new FileInputStream(mTermFd));
    }

    /**
     * Filters PATH down to entries that exist, are directories, and are
     * executable. Returns the surviving entries joined by ':' (empty string if
     * none survive).
     */
    private String checkPath(String path) {
        String[] dirs = path.split(":");
        StringBuilder checkedPath = new StringBuilder(path.length());
        for (String dirname : dirs) {
            File dir = new File(dirname);
            if (dir.isDirectory() && FileCompat.canExecute(dir)) {
                checkedPath.append(dirname);
                checkedPath.append(":");
            }
        }
        // Fix: the original unconditionally called substring(0, length - 1),
        // which throws StringIndexOutOfBoundsException when no directory passed
        // the check (e.g. a fully restricted PATH).
        return checkedPath.length() == 0 ? "" : checkedPath.substring(0, checkedPath.length() - 1);
    }

    /**
     * Starts the emulator, synchronizes pty UTF-8 mode, launches the process
     * watcher and sends the initial command.
     */
    @Override
    public void initializeEmulator(int columns, int rows) {
        if (VTTEST_MODE) {
            columns = 80;
            rows = 24;
        }
        super.initializeEmulator(columns, rows);

        Exec.setPtyUTF8Mode(mTermFd, getUTF8Mode());
        setUTF8ModeUpdateCallback(mUTF8ModeNotify);

        mWatcherThread.start();
        sendInitialCommand(mInitialCommand);
    }

    /** Writes the initial command (if any) followed by carriage return. */
    private void sendInitialCommand(String initialCommand) {
        if (initialCommand.length() > 0) {
            write(initialCommand + '\r');
        }
    }

    /**
     * Spawns the shell via the native Exec layer; falls back to the failsafe
     * shell if the configured one is missing or not executable.
     */
    private void createSubprocess(int[] processId, String shell, String[] env) {
        ArrayList<String> argList = parse(shell);
        String arg0;
        String[] args;

        try {
            arg0 = argList.get(0);
            File file = new File(arg0);
            if (!file.exists()) {
                Log.e(TermDebug.LOG_TAG, "Shell " + arg0 + " not found!");
                throw new FileNotFoundException(arg0);
            } else if (!FileCompat.canExecute(file)) {
                Log.e(TermDebug.LOG_TAG, "Shell " + arg0 + " not executable!");
                throw new FileNotFoundException(arg0);
            }
            args = argList.toArray(new String[1]);
        } catch (Exception e) {
            // Configured shell unusable — fall back to the failsafe shell.
            argList = parse(mSettings.getFailsafeShell());
            arg0 = argList.get(0);
            args = argList.toArray(new String[1]);
        }

        mTermFd = Exec.createSubprocess(arg0, args, env, processId);
    }

    /**
     * Minimal shell-like tokenizer: splits on whitespace, honors double quotes
     * and backslash escapes inside quotes. Not a full POSIX parser.
     */
    private ArrayList<String> parse(String cmd) {
        final int PLAIN = 0;
        final int WHITESPACE = 1;
        final int INQUOTE = 2;
        int state = WHITESPACE;
        ArrayList<String> result = new ArrayList<String>();
        int cmdLen = cmd.length();
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < cmdLen; i++) {
            char c = cmd.charAt(i);
            if (state == PLAIN) {
                if (Character.isWhitespace(c)) {
                    result.add(builder.toString());
                    builder.delete(0, builder.length());
                    state = WHITESPACE;
                } else if (c == '"') {
                    state = INQUOTE;
                } else {
                    builder.append(c);
                }
            } else if (state == WHITESPACE) {
                if (Character.isWhitespace(c)) {
                    // do nothing
                } else if (c == '"') {
                    state = INQUOTE;
                } else {
                    state = PLAIN;
                    builder.append(c);
                }
            } else if (state == INQUOTE) {
                if (c == '\\') {
                    if (i + 1 < cmdLen) {
                        i += 1;
                        builder.append(cmd.charAt(i));
                    }
                } else if (c == '"') {
                    state = PLAIN;
                } else {
                    builder.append(c);
                }
            }
        }
        if (builder.length() > 0) {
            result.add(builder.toString());
        }
        return result;
    }

    @Override
    public void updateSize(int columns, int rows) {
        if (VTTEST_MODE) {
            columns = 80;
            rows = 24;
        }
        // Inform the attached pty of our new size:
        Exec.setPtyWindowSize(mTermFd, rows, columns, 0, 0);
        super.updateSize(columns, rows);
    }

    /* XXX We should really get this ourselves from the resource bundle, but
       we cannot hold a context */
    public void setProcessExitMessage(String message) {
        mProcessExitMessage = message;
    }

    /** Called on the handler thread when the watcher reports subprocess exit. */
    private void onProcessExit(int result) {
        if (mSettings.closeWindowOnProcessExit()) {
            finish();
        } else if (mProcessExitMessage != null) {
            try {
                byte[] msg = ("\r\n[" + mProcessExitMessage + "]").getBytes("UTF-8");
                appendToEmulator(msg, 0, msg.length);
                notifyUpdate();
            } catch (UnsupportedEncodingException e) {
                // Never happens
            }
        }
    }

    /** Hangs up the process group, closes the pty fd, then tears down the session. */
    @Override
    public void finish() {
        Exec.hangupProcessGroup(mProcId);
        Exec.close(mTermFd);
        super.finish();
    }

    /**
     * Gets the terminal session's title. Unlike the superclass's getTitle(),
     * if the title is null or an empty string, the provided default title will
     * be returned instead.
     *
     * @param defaultTitle The default title to use if this session's title is
     *                     unset or an empty string.
     */
    public String getTitle(String defaultTitle) {
        String title = super.getTitle();
        if (title != null && title.length() > 0) {
            return title;
        } else {
            return defaultTitle;
        }
    }

    /** Sets the session handle; may be assigned only once. */
    public void setHandle(String handle) {
        if (mHandle != null) {
            throw new IllegalStateException("Cannot change handle once set");
        }
        mHandle = handle;
    }

    public String getHandle() {
        return mHandle;
    }
}
/**
 * SIX OVAL - https://nakamura5akihito.github.io/
 * Copyright (C) 2010 Akihito Nakamura
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.opensec.six.oval.model.definitions;

import io.opensec.six.oval.model.ElementType;
import io.opensec.six.oval.model.OvalObject;
import io.opensec.six.oval.model.common.OvalId;
import io.opensec.util.persist.Persistable;
import java.io.Serializable;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.mongodb.morphia.annotations.Entity;
import org.mongodb.morphia.annotations.Id;



/**
 * An association from one OVAL definitions element (a definition or a test)
 * to the elements it directly references:
 * a definition references the tests and extended definitions found in its
 * criteria tree, and a test references its object and states.
 *
 * @author Akihito Nakamura, AIST
 * @see <a href="http://oval.mitre.org/language/">OVAL Language</a>
 */
@Entity( "oval.def.element_assoc" )
public class DefinitionsElementAssoc
    implements Persistable<String>, OvalObject
{

    /**
     * A lightweight, typed reference to an OVAL element.
     * Equality and hash code are based on the OVAL-ID only; the element
     * type is deliberately excluded (an OVAL-ID encodes its element type).
     */
    public static final class Ref
        implements Serializable
    {
        private final ElementType  type;
        private final String       oval_id;


        /**
         * Constructs a reference, deriving the element type from the OVAL-ID.
         *
         * @param oval_id
         *  the OVAL-ID of the referenced element.
         */
        public Ref( final String oval_id )
        {
            this( OvalId.elementTypeOf( oval_id ), oval_id );
        }


        /**
         * Constructs a reference with an explicit element type.
         *
         * @param type
         *  the type of the referenced element.
         * @param oval_id
         *  the OVAL-ID of the referenced element.
         */
        public Ref( final ElementType type, final String oval_id )
        {
            this.type = type;
            this.oval_id = oval_id;
        }


        public ElementType getType()
        {
            return type;
        }


        public String getOvalID()
        {
            return oval_id;
        }


        //**************************************************************
        //  java.lang.Object
        //**************************************************************

        @Override
        public int hashCode()
        {
            final int  prime = 37;
            int  result = 17;
            result = prime * result + (oval_id == null ? 0 : oval_id.hashCode());
            return result;
        }


        @Override
        public boolean equals( final Object obj )
        {
            if (this == obj) {
                return true;
            }

            if (!(obj instanceof Ref)) {
                return false;
            }

            Ref  other = (Ref)obj;
            String  other_oval_id = other.getOvalID();
            String  this_oval_id = this.getOvalID();
            if (this_oval_id == other_oval_id
                            ||  (this_oval_id != null  &&  this_oval_id.equals( other_oval_id ))) {
                return true;
            }

            return false;
        }


        @Override
        public String toString()
        {
            return "[type=" + getType() + ", id=" + getOvalID() + "]";
        }

    }
    //Ref



    // The OVAL-ID of the source element; doubles as the persistent ID.
    @Id
    private String  oval_id;

    // The type of the source element: DEFINITION or TEST.
    private ElementType  type;

    // The elements the source element references directly.
    private final Set<Ref>  references = new HashSet<Ref>();



    /**
     * Constructor.
     */
    public DefinitionsElementAssoc()
    {
    }


    /**
     * Constructs the associations of the given definition:
     * the tests and extended definitions referenced from its criteria.
     */
    public DefinitionsElementAssoc( final DefinitionType def )
    {
        setOvalID( def.getOvalId() );
        setType( ElementType.DEFINITION );
        _build( references, def );
    }


    /**
     * Constructs the associations of the given test:
     * its object and states.
     */
    public DefinitionsElementAssoc( final TestType tst )
    {
        setOvalID( tst.getOvalId() );
        setType( ElementType.TEST );
        _build( references, tst );
    }



    /**
     * Sets the OVAL-ID.
     *
     * @param oval_id
     *  the OVAL-ID.
     */
    public void setOvalID( final String oval_id )
    {
        this.oval_id = oval_id;
    }


    /**
     * Returns the OVAL-ID.
     *
     * @return
     *  the OVAL-ID.
     */
    public String getOvalID()
    {
        return oval_id;
    }


    /**
     * Sets the element type of the source element.
     */
    public void setType( final ElementType type )
    {
        this.type = type;
    }


    public ElementType getType()
    {
        return type;
    }


    /**
     * Replaces the references with the given collection.
     * A null argument is silently ignored.
     */
    public void setReferences( final Collection<? extends Ref> refs )
    {
        if (refs == null) {
            return;
        }

        // Guard: clearing first would destroy the input if it IS this set.
        if (references != refs) {
            references.clear();
            references.addAll( refs );
        }
    }


    public Set<Ref> getReferences()
    {
        return references;
    }


    public Iterator<Ref> iterateReferences()
    {
        return references.iterator();
    }


    /**
     * Returns only the references of the given element type.
     */
    public Set<Ref> getReferences( final ElementType type )
    {
        Set<Ref>  refs = new HashSet<Ref>();
        for (Ref  r : references) {
            if (type == r.getType()) {
                refs.add( r );
            }
        }

        return refs;
    }



    ////////////////////////////////////////////////////////////////
    //  test
    ////////////////////////////////////////////////////////////////

    /**
     * test: collects the object and state references of the test.
     */
    private void _build( final Set<Ref> refs, final TestType tst )
    {
        _build( refs, tst.getObject() );

        Collection<StateRefType>  states = tst.getState();
        if (states != null) {
            for (StateRefType  s : states) {
                _build( refs, s );
            }
        }
    }


    /**
     * test/object
     */
    private void _build( final Set<Ref> refs, final SystemObjectRefType object )
    {
        if (object != null) {
            String  oval_id = object.getObjectRef();
            if (oval_id != null) {
                refs.add( new Ref( ElementType.OBJECT, oval_id ) );
            }
        }
    }


    /**
     * test/state
     */
    private void _build( final Set<Ref> refs, final StateRefType state )
    {
        if (state != null) {
            String  oval_id = state.getStateRef();
            if (oval_id != null) {
                refs.add( new Ref( ElementType.STATE, oval_id ) );
            }
        }
    }



    ////////////////////////////////////////////////////////////////
    //  definition
    ////////////////////////////////////////////////////////////////

    /**
     * definition: collects the references of the criteria tree, if any.
     */
    private void _build( final Set<Ref> refs, final DefinitionType def )
    {
        CriteriaType  criteria = def.getCriteria();
        if (criteria != null) {
            _build( refs, criteria );
        }
    }


    /**
     * definition/criteria: recursively collects criterion and
     * extend_definition references.
     */
    private void _build( final Set<Ref> refs, final CriteriaType critera )
    {
        for (CriteriaElement  e : critera.getElements()) {
            if (e instanceof CriterionType) {
                _build( refs, CriterionType.class.cast( e ) );
            } else if (e instanceof CriteriaType) {
                _build( refs, CriteriaType.class.cast( e ) );
            } else if (e instanceof ExtendDefinitionType) {
                _build( refs, ExtendDefinitionType.class.cast( e ) );
            }
        }
    }


    /**
     * definition/criteria/criterion: a reference to a test.
     */
    private void _build( final Set<Ref> refs, final CriterionType criterion )
    {
        if (criterion == null) {
            throw new IllegalArgumentException( "null criterion" );
        }

        String  oval_id = criterion.getTestRef();
        if (oval_id == null) {
            throw new IllegalArgumentException( "null test_ref" );
        }

        refs.add( new Ref( ElementType.TEST, oval_id ) );
    }


    /**
     * definition/criteria/extend_definition: a reference to another definition.
     */
    private void _build( final Set<Ref> refs, final ExtendDefinitionType criterion )
    {
        if (criterion == null) {
            throw new IllegalArgumentException( "null criterion" );
        }

        String  oval_id = criterion.getDefinitionRef();
        if (oval_id == null) {
            throw new IllegalArgumentException( "null definition_ref" );
        }

        refs.add( new Ref( ElementType.DEFINITION, oval_id ) );
    }



    //**************************************************************
    //  Persistable
    //**************************************************************

    @Override
    public void setPersistentID( final String pid )
    {
        setOvalID( pid );
    }


    @Override
    public String getPersistentID()
    {
        return getOvalID();
    }



    //**************************************************************
    //  java.lang.Object
    //**************************************************************

    @Override
    public String toString()
    {
        return "[oval_id=" + getOvalID()
                        + ", type=" + getType()
                        + ", references=" + getReferences()
                        + "]";
    }

}
//
package org.continuity.api.entities.config.cobra; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; /** * * @author Henning Schulz * */ @JsonIgnoreProperties(ignoreUnknown = true) public class AppendStrategyConfiguration { private AppendStrategy strategy = AppendStrategy.KMEANS; @JsonProperty("avg-transition-tolerance") @JsonInclude(Include.NON_NULL) private Double avgTransitionTolerance = null; @JsonInclude(Include.NON_NULL) private Double epsilon = null; @JsonIgnore private boolean epsilonSet = false; @JsonProperty("min-sample-size") @JsonInclude(Include.NON_NULL) private Long minSampleSize = null; @JsonInclude(Include.NON_NULL) private Long k = null; @JsonProperty("max-iterations") @JsonInclude(Include.NON_NULL) private Long maxIterations = null; @JsonProperty("num-seedings") @JsonInclude(Include.NON_NULL) private Long numSeedings = null; @JsonProperty("convergence-tolerance") @JsonInclude(Include.NON_NULL) private Double convergenceTolerance = null; @JsonInclude(Include.NON_NULL) private Integer parallelize = null; @JsonProperty("quantile-range") @JsonInclude(Include.NON_NULL) private Double quantileRange = null; @JsonProperty("radius-factor") @JsonInclude(Include.NON_NULL) private Double radiusFactor = null; private AppendStrategyConfiguration(AppendStrategy strategy, Double avgTransitionTolerance, Double epsilon, boolean epsilonSet, Long numSeedings, Long minSampleSize, Long k, Integer parallelize, Double quantileRange, Double radiusFactor) { this.strategy = strategy; this.avgTransitionTolerance = avgTransitionTolerance; this.epsilon = epsilon; this.epsilonSet = epsilonSet; this.numSeedings = numSeedings; this.minSampleSize = minSampleSize; this.k = k; this.parallelize = parallelize; this.quantileRange = quantileRange; 
this.radiusFactor = radiusFactor; } public AppendStrategyConfiguration() { } public static AppendStrategyConfiguration defaultDbscan() { return new AppendStrategyConfiguration(AppendStrategy.DBSCAN, null, 1.5, true, 10L, 10L, null, null, null, null); } public static AppendStrategyConfiguration defaultKmeans() { return new AppendStrategyConfiguration(AppendStrategy.KMEANS, null, null, false, null, null, 5L, 1, 0.9, null); } public static AppendStrategyConfiguration defaultMinimumDistance() { return new AppendStrategyConfiguration(AppendStrategy.MINIMUM_DISTANCE, null, null, false, 10L, 10L, null, null, null, 1.1); } /** * The strategy defining how to cluster/append new sessions. * * @return */ public AppendStrategy getStrategy() { return strategy; } public void setStrategy(AppendStrategy strategy) { this.strategy = strategy; } /** * The average transition tolerance for the DBSCAN algorithm. Corresponds to the epsilon * parameter with {@code n} = number of endpoints: * {@code epsilon = (n + 1) * avgTransitionTolerance}. * * @return */ public Double getAvgTransitionTolerance() { return avgTransitionTolerance; } public void setAvgTransitionTolerance(double avgTransitionTolerance) { this.avgTransitionTolerance = avgTransitionTolerance; if (!this.epsilonSet) { this.epsilon = null; } } /** * The epsilon parameter for the DBSCAN algorithm. Alternative to * {@link #getAvgTransitionTolerance()}. * * @return */ public Double getEpsilon() { return epsilon; } public void setEpsilon(double epsilon) { this.epsilon = epsilon; this.epsilonSet = true; } /** * The min sample size parameter for the DBSCAN algorithm. * * @return */ public Long getMinSampleSize() { return minSampleSize; } public void setMinSampleSize(long minSampleSize) { this.minSampleSize = minSampleSize; } /** * The parameter of the KMeans algorithm. * * @return */ public Long getK() { return k; } public void setK(long k) { this.k = k; } /** * The maximum number of iterations KMeans is allowed to run. 
* * @return */ public Long getMaxIterations() { return maxIterations; } public void setMaxIterations(long maxIterations) { this.maxIterations = maxIterations; } /** * The number of times KMeans is executed with different centroid seedings. * * @return */ public Long getNumSeedings() { return numSeedings; } public void setNumSeedings(long numSeedings) { this.numSeedings = numSeedings; } /** * The tolerance for declaring convergence in KMeans. * @return */ public Double getConvergenceTolerance() { return convergenceTolerance; } public void setConvergenceTolerance(double convergenceTolerance) { this.convergenceTolerance = convergenceTolerance; } /** * The number of jobs to run in parallel. Negative numbers mean * {@code num_cpus + 1 + parallelize}, i.e., -1 means all CPUs, -2 all except for one, etc. * Currently, only supported by kmeans. * * @return */ public Integer getParallelize() { return parallelize; } public void setParallelize(int parallelize) { this.parallelize = parallelize; } /** * The quantile range used for outlier filtering. All sessions with a distance higher than * {@code q[0.5 + quantileRange] + 1.5 * (q[0.5 + quantileRange] - q[0.5 - quantileRange])} will * be filtered. * * @return */ public Double getQuantileRange() { return quantileRange; } public void setQuantileRange(Double quantileRange) { this.quantileRange = quantileRange; } /** * The factor to be multiplied with each cluster radius to decide whether a new session belongs * to the cluster. Should be 1.0 or larger (less than 1.0 is allowed but highly discouraged). * * @return */ public Double getRadiusFactor() { return radiusFactor; } public void setRadiusFactor(Double radiusFactor) { this.radiusFactor = radiusFactor; } }
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.media;

import android.app.PendingIntent;
import android.content.ComponentName;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.RestrictionsManager;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.net.Uri;
import android.os.Build;
import android.provider.Browser;
import android.text.TextUtils;

import androidx.browser.customtabs.CustomTabsIntent;

import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.base.ContextUtils;
import org.chromium.base.IntentUtils;
import org.chromium.base.SysUtils;
import org.chromium.base.task.PostTask;
import org.chromium.base.task.TaskTraits;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.browserservices.intents.BrowserServicesIntentDataProvider.CustomTabsUiType;
import org.chromium.chrome.browser.customtabs.CustomTabIntentDataProvider;
import org.chromium.chrome.browser.document.ChromeLauncherActivity;
import org.chromium.chrome.browser.flags.ChromeFeatureList;
import org.chromium.ui.util.ColorUtils;

import java.util.Locale;

/**
 * A class containing some utility static methods.
 */
public class MediaViewerUtils {
    private static final String DEFAULT_MIME_TYPE = "*/*";
    private static final String MIMETYPE_AUDIO = "audio";
    private static final String MIMETYPE_IMAGE = "image";
    private static final String MIMETYPE_VIDEO = "video";

    private static boolean sIsMediaLauncherActivityForceEnabledForTest;

    /**
     * Creates an Intent that allows viewing the given file in an internal media viewer.
     * @param displayUri URI to display to the user, ideally in file:// form.
     * @param contentUri content:// URI pointing at the file.
     * @param mimeType MIME type of the file.
     * @param allowExternalAppHandlers Whether the viewer should allow the user to open with another
     *                                 app.
     * @param context Context used to resolve resources and build PendingIntents.
     * @return Intent that can be fired to open the file.
     */
    public static Intent getMediaViewerIntent(Uri displayUri, Uri contentUri, String mimeType,
            boolean allowExternalAppHandlers, Context context) {
        Bitmap closeIcon = BitmapFactory.decodeResource(
                context.getResources(), R.drawable.ic_arrow_back_white_24dp);
        Bitmap shareIcon = BitmapFactory.decodeResource(
                context.getResources(), R.drawable.ic_share_white_24dp);

        CustomTabsIntent.Builder builder = new CustomTabsIntent.Builder();
        builder.setToolbarColor(Color.BLACK);
        builder.setCloseButtonIcon(closeIcon);
        builder.setShowTitle(true);
        builder.setColorScheme(ColorUtils.inNightMode(context) ? CustomTabsIntent.COLOR_SCHEME_DARK
                                                               : CustomTabsIntent.COLOR_SCHEME_LIGHT);

        if (allowExternalAppHandlers && !willExposeFileUri(contentUri)) {
            // Create a PendingIntent that can be used to view the file externally.
            // TODO(https://crbug.com/795968): Check if this is problematic in multi-window mode,
            //                                 where two different viewers could be visible at the
            //                                 same time.
            Intent viewIntent = createViewIntentForUri(contentUri, mimeType, null, null);
            Intent chooserIntent = Intent.createChooser(viewIntent, null);
            chooserIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            String openWithStr = context.getString(R.string.download_manager_open_with);
            PendingIntent pendingViewIntent = PendingIntent.getActivity(context, 0, chooserIntent,
                    PendingIntent.FLAG_CANCEL_CURRENT
                            | IntentUtils.getPendingIntentMutabilityFlag(true));
            builder.addMenuItem(openWithStr, pendingViewIntent);
        }

        // Create a PendingIntent that shares the file with external apps.
        // If the URI is a file URI and the Android version is N or later, this will throw a
        // FileUriExposedException. In this case, we just don't add the share button.
        if (!willExposeFileUri(contentUri)) {
            PendingIntent pendingShareIntent =
                    PendingIntent.getActivity(context, 0, createShareIntent(contentUri, mimeType),
                            PendingIntent.FLAG_CANCEL_CURRENT
                                    | IntentUtils.getPendingIntentMutabilityFlag(true));
            builder.setActionButton(
                    shareIcon, context.getString(R.string.share), pendingShareIntent, true);
        }

        // The color of the media viewer is dependent on the file type.
        int backgroundRes;
        if (isImageType(mimeType)) {
            backgroundRes = R.color.image_viewer_bg;
        } else {
            backgroundRes = R.color.media_viewer_bg;
        }
        int mediaColor = ApiCompatibilityUtils.getColor(context.getResources(), backgroundRes);

        // Build up the Intent further.
        Intent intent = builder.build().intent;
        intent.setPackage(context.getPackageName());
        intent.setData(contentUri);
        intent.putExtra(CustomTabIntentDataProvider.EXTRA_UI_TYPE, CustomTabsUiType.MEDIA_VIEWER);
        intent.putExtra(CustomTabIntentDataProvider.EXTRA_MEDIA_VIEWER_URL, displayUri.toString());
        intent.putExtra(CustomTabIntentDataProvider.EXTRA_ENABLE_EMBEDDED_MEDIA_EXPERIENCE, true);
        intent.putExtra(CustomTabIntentDataProvider.EXTRA_INITIAL_BACKGROUND_COLOR, mediaColor);
        intent.putExtra(CustomTabsIntent.EXTRA_TOOLBAR_COLOR, mediaColor);
        intent.putExtra(Browser.EXTRA_APPLICATION_ID, context.getPackageName());
        IntentUtils.addTrustedIntentExtras(intent);
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        intent.setClass(context, ChromeLauncherActivity.class);
        return intent;
    }

    /**
     * Creates an Intent to open the file in another app by firing an Intent to Android.
     * @param fileUri  Uri pointing to the file.
     * @param mimeType MIME type for the file.
     * @param originalUrl The original url of the downloaded file.
     * @param referrer Referrer of the downloaded file.
     * @return Intent that can be used to start an Activity for the file.
     */
    public static Intent createViewIntentForUri(
            Uri fileUri, String mimeType, String originalUrl, String referrer) {
        Intent fileIntent = new Intent(Intent.ACTION_VIEW);
        String normalizedMimeType = Intent.normalizeMimeType(mimeType);
        if (TextUtils.isEmpty(normalizedMimeType)) {
            fileIntent.setData(fileUri);
        } else {
            fileIntent.setDataAndType(fileUri, normalizedMimeType);
        }
        fileIntent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
        fileIntent.addFlags(Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
        fileIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        setOriginalUrlAndReferralExtraToIntent(fileIntent, originalUrl, referrer);
        return fileIntent;
    }

    /**
     * Adds the originating Uri and referrer extras to an intent if they are not null.
     * @param intent Intent for adding extras.
     * @param originalUrl The original url of the downloaded file.
     * @param referrer Referrer of the downloaded file.
     */
    public static void setOriginalUrlAndReferralExtraToIntent(
            Intent intent, String originalUrl, String referrer) {
        if (originalUrl != null) {
            intent.putExtra(Intent.EXTRA_ORIGINATING_URI, Uri.parse(originalUrl));
        }
        if (referrer != null) intent.putExtra(Intent.EXTRA_REFERRER, Uri.parse(referrer));
    }

    /**
     * Determines the media type from the given MIME type.
     * @param mimeType The MIME type to check.
     * @return MediaLauncherActivity.MediaType enum value for determined media type.
     */
    static int getMediaTypeFromMIMEType(String mimeType) {
        if (TextUtils.isEmpty(mimeType)) return MediaLauncherActivity.MediaType.UNKNOWN;

        // MIME types are ASCII protocol strings; use Locale.ROOT so casing is
        // locale-independent (Locale.getDefault() mis-lowercases "I" in e.g. Turkish).
        String[] pieces = mimeType.toLowerCase(Locale.ROOT).split("/");

        if (pieces.length != 2) return MediaLauncherActivity.MediaType.UNKNOWN;

        switch (pieces[0]) {
            case MIMETYPE_AUDIO:
                return MediaLauncherActivity.MediaType.AUDIO;
            case MIMETYPE_IMAGE:
                return MediaLauncherActivity.MediaType.IMAGE;
            case MIMETYPE_VIDEO:
                return MediaLauncherActivity.MediaType.VIDEO;
            default:
                return MediaLauncherActivity.MediaType.UNKNOWN;
        }
    }

    /**
     * Selectively enables or disables the MediaLauncherActivity.
     */
    public static void updateMediaLauncherActivityEnabled() {
        PostTask.postTask(TaskTraits.BEST_EFFORT_MAY_BLOCK,
                () -> { synchronousUpdateMediaLauncherActivityEnabled(); });
    }

    // Applies the current enabled/disabled state of the media and audio launcher
    // activity components on the calling thread (may block on PackageManager).
    static void synchronousUpdateMediaLauncherActivityEnabled() {
        Context context = ContextUtils.getApplicationContext();
        PackageManager packageManager = context.getPackageManager();
        ComponentName mediaComponentName = new ComponentName(context, MediaLauncherActivity.class);
        ComponentName audioComponentName = new ComponentName(
                context, "org.chromium.chrome.browser.media.AudioLauncherActivity");

        int newMediaState = shouldEnableMediaLauncherActivity()
                ? PackageManager.COMPONENT_ENABLED_STATE_ENABLED
                : PackageManager.COMPONENT_ENABLED_STATE_DISABLED;
        int newAudioState = shouldEnableAudioLauncherActivity()
                ? PackageManager.COMPONENT_ENABLED_STATE_ENABLED
                : PackageManager.COMPONENT_ENABLED_STATE_DISABLED;
        // This indicates that we don't want to kill Chrome when changing component enabled
        // state.
        int flags = PackageManager.DONT_KILL_APP;

        if (packageManager.getComponentEnabledSetting(mediaComponentName) != newMediaState) {
            packageManager.setComponentEnabledSetting(mediaComponentName, newMediaState, flags);
        }
        if (packageManager.getComponentEnabledSetting(audioComponentName) != newAudioState) {
            packageManager.setComponentEnabledSetting(audioComponentName, newAudioState, flags);
        }
    }

    /**
     * Force MediaLauncherActivity to be enabled for testing.
     */
    public static void forceEnableMediaLauncherActivityForTest() {
        sIsMediaLauncherActivityForceEnabledForTest = true;
        // Synchronously update to avoid race conditions in tests.
        synchronousUpdateMediaLauncherActivityEnabled();
    }

    /**
     * Stops forcing MediaLauncherActivity to be enabled for testing.
     */
    public static void stopForcingEnableMediaLauncherActivityForTest() {
        sIsMediaLauncherActivityForceEnabledForTest = false;
        // Synchronously update to avoid race conditions in tests.
        synchronousUpdateMediaLauncherActivityEnabled();
    }

    private static boolean shouldEnableMediaLauncherActivity() {
        return sIsMediaLauncherActivityForceEnabledForTest
                || ((SysUtils.isAndroidGo() || isEnterpriseManaged())
                        && ChromeFeatureList.isEnabled(ChromeFeatureList.HANDLE_MEDIA_INTENTS));
    }

    private static boolean shouldEnableAudioLauncherActivity() {
        return shouldEnableMediaLauncherActivity() && !SysUtils.isAndroidGo();
    }

    private static boolean isEnterpriseManaged() {
        RestrictionsManager restrictionsManager =
                (RestrictionsManager) ContextUtils.getApplicationContext().getSystemService(
                        Context.RESTRICTIONS_SERVICE);
        return restrictionsManager.hasRestrictionsProvider()
                || !restrictionsManager.getApplicationRestrictions().isEmpty();
    }

    private static Intent createShareIntent(Uri fileUri, String mimeType) {
        if (TextUtils.isEmpty(mimeType)) mimeType = DEFAULT_MIME_TYPE;

        Intent intent = new Intent(Intent.ACTION_SEND);
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
        intent.putExtra(Intent.EXTRA_STREAM, fileUri);
        intent.setType(mimeType);
        return intent;
    }

    private static boolean isImageType(String mimeType) {
        if (TextUtils.isEmpty(mimeType)) return false;

        // Locale.ROOT for locale-independent ASCII lowercasing of the MIME type.
        String[] pieces = mimeType.toLowerCase(Locale.ROOT).split("/");

        if (pieces.length != 2) return false;

        return MIMETYPE_IMAGE.equals(pieces[0]);
    }

    private static boolean willExposeFileUri(Uri uri) {
        assert uri != null && !uri.equals(Uri.EMPTY) : "URI is not successfully generated.";
        // On Android N and later, an Exception is thrown if we try to expose a file:// URI.
        return uri.getScheme().equals(ContentResolver.SCHEME_FILE)
                && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
    }
}
/******************************************************************************* * # Copyright 2015 InfinitiesSoft Solutions Inc. * # * # Licensed under the Apache License, Version 2.0 (the "License"); you may * # not use this file except in compliance with the License. You may obtain * # a copy of the License at * # * # http://www.apache.org/licenses/LICENSE-2.0 * # * # Unless required by applicable law or agreed to in writing, software * # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * # License for the specific language governing permissions and limitations * # under the License. *******************************************************************************/ package com.infinities.keystone4j.model.identity; import java.util.HashSet; import java.util.Set; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.JoinColumn; import javax.persistence.Lob; import javax.persistence.ManyToOne; import javax.persistence.OneToMany; import javax.persistence.Table; import javax.persistence.Transient; import javax.persistence.UniqueConstraint; import javax.validation.constraints.NotNull; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlTransient; import com.fasterxml.jackson.annotation.JsonView; import com.infinities.keystone4j.model.BaseEntity; import com.infinities.keystone4j.model.DomainAwared; import com.infinities.keystone4j.model.assignment.Domain; import com.infinities.keystone4j.model.assignment.Project; import com.infinities.keystone4j.model.utils.Views; @Entity @Table(name = "USERS", uniqueConstraints = { @UniqueConstraint(columnNames = { "DOMAINID", "NAME" }) }) public class User extends BaseEntity implements java.io.Serializable, DomainAwared, IUser { /** * */ private static final long serialVersionUID = 
-6436954503286770674L;

  // Owning domain; mapping mirrors keystone.identity.backends.sql.User (20150114).
  private Domain domain;

  @NotNull(message = "name field is required and cannot be empty")
  private String name;

  // email column is currently disabled pending filtering support.
  // private String email; // TODO filter

  private String password;
  private Boolean enabled = true;
  // TODO filter
  private String extra;
  // @XmlElement(name = "default_project")
  private Project defaultProject;
  // TODO filter
  private Set<UserGroupMembership> userGroupMemberships = new HashSet<UserGroupMembership>(0);
  // TODO filter

  // NOTE(review): the userProjectGrants / userDomainGrants / credentials /
  // trusts / tokens / assignments relations were previously mapped here and are
  // intentionally disabled; consult version-control history before re-enabling.

  // Dirty-tracking flags: each setter flips its flag so callers performing a
  // partial update can detect which fields were explicitly assigned.
  private boolean nameUpdated = false;
  // private boolean emailUpdated = false;
  private boolean domainUpdated = false;
  private boolean passwordUpdated = false;
  private boolean enabledUpdated = false;
  private boolean extraUpdated = false;
  private boolean defaultProjectUpdated = false;

  private String tenantId;
  private String username;
  private String firstName;
  private String lastName;
  private boolean firstNameUpdated = false;
  private boolean lastNameUpdated = false;
  private String ip;
  private Integer port;
  private boolean ipUpdated = false;
  private boolean portUpdated = false;

  /** User name; required, persisted in NAME (max 255 chars). */
  @Override
  @Column(name = "NAME", length = 255, nullable = false)
  @JsonView(Views.Basic.class)
  public String getName() {
    return name;
  }

  public void setName(String name) {
    this.name = name;
    nameUpdated = true;
  }

  // listUser
  /** Lazily-loaded owning domain (DOMAINID foreign key). */
  @Override
  @JsonView(Views.AuthenticateForToken.class)
  @ManyToOne(fetch = FetchType.LAZY)
  @JoinColumn(name = "DOMAINID", nullable = false)
  public Domain getDomain() {
    return domain;
  }

  @Override
  public void setDomain(Domain domain) {
    this.domain = domain;
    domainUpdated = true;
  }

  /** Convenience view of the domain id; null when no domain is set. */
  @JsonView(Views.Advance.class)
  @Transient
  @Override
  @XmlElement(name = "domain_id")
  public String getDomainId() {
    if (getDomain() != null) {
      return getDomain().getId();
    }
    return null;
  }

  /** Sets the domain by id; ignored when the id is null or empty. */
  @Override
  @Transient
  @XmlElement(name = "domain_id")
  public void setDomainId(String domainid) {
    if (!(domainid == null || domainid.length() == 0)) {
      Domain domain = new Domain();
      domain.setId(domainid);
      setDomain(domain);
    }
  }

  /** Hashed password value as stored in the PASSWORD column (max 128 chars). */
  @JsonView(Views.All.class)
  @Column(name = "PASSWORD", length = 128)
  public String getPassword() {
    return password;
  }

  public void setPassword(String password) {
    this.password = password;
    passwordUpdated = true;
  }

  /** Whether the account is enabled; defaults to true. */
  @JsonView(Views.Advance.class)
  @Column(name = "ENABLED", nullable = false)
  public Boolean getEnabled() {
    return enabled;
  }

  public void setEnabled(Boolean enabled) {
    this.enabled = enabled;
    enabledUpdated = true;
  }

  /** Free-form extra data blob (EXTRA column); excluded from XML output. */
  @XmlTransient
  @Column(name = "EXTRA")
  @JsonView(Views.All.class)
  public String getExtra() {
    return extra;
  }

  public void setExtra(String extra) {
    this.extra = extra;
    extraUpdated = true;
  }

  /** Lazily-loaded default project (DEFAULTPROJECTID foreign key). */
  @XmlTransient
  @ManyToOne(fetch = FetchType.LAZY)
  @JoinColumn(name = "DEFAULTPROJECTID", nullable = false)
  public Project getDefaultProject() {
    return defaultProject;
  }

  @XmlTransient
  public void setDefaultProject(Project defaultProject) {
    this.defaultProject = defaultProject;
    defaultProjectUpdated = true;
  }

  /** Group memberships owned by this user; cascades all operations. */
  @OneToMany(fetch = FetchType.LAZY, mappedBy = "user", cascade = CascadeType.ALL)
  @JsonView(Views.All.class)
  public Set<UserGroupMembership> getUserGroupMemberships() {
    return userGroupMemberships;
  }

  public void setUserGroupMemberships(Set<UserGroupMembership> userGroupMemberships) {
    this.userGroupMemberships = userGroupMemberships;
  }

  @XmlTransient
  @Transient
  public boolean isNameUpdated() {
    return nameUpdated;
  }

  @XmlTransient
  @Transient
  public void setNameUpdated(boolean nameUpdated) {
    this.nameUpdated = nameUpdated;
  }

  @XmlTransient
  @Transient
  public boolean isDomainUpdated() {
    return domainUpdated;
  }

  @XmlTransient
  @Transient
  public void setDomainUpdated(boolean domainUpdated) {
    this.domainUpdated = domainUpdated;
  }

  @XmlTransient
  @Transient
  public boolean isPasswordUpdated() {
    return passwordUpdated;
  }

  @XmlTransient
  @Transient
  public void setPasswordUpdated(boolean passwordUpdated) {
    this.passwordUpdated = passwordUpdated;
  }

  @XmlTransient
  @Transient
  public boolean isEnabledUpdated() {
    return enabledUpdated;
  }

  @XmlTransient
  @Transient
  public void setEnabledUpdated(boolean enabledUpdated) {
    this.enabledUpdated = enabledUpdated;
  }

  @XmlTransient
  @Transient
  public boolean isExtraUpdated() {
    return extraUpdated;
  }

  @XmlTransient
  @Transient
  public void setExtraUpdated(boolean extraUpdated) {
    this.extraUpdated = extraUpdated;
  }

  @XmlTransient
  @Transient
  public boolean isDefaultProjectUpdated() {
    return defaultProjectUpdated;
  }

  @XmlTransient
  @Transient
  public void setDefaultProjectUpdated(boolean defaultProjectUpdated) {
    this.defaultProjectUpdated = defaultProjectUpdated;
  }

  // listUser
  /** Convenience view of the default project id; null when no project is set. */
  @JsonView(Views.Advance.class)
  @Transient
  @XmlElement(name = "default_project_id")
  public String getDefaultProjectId() {
    if (getDefaultProject() == null) {
      return null;
    } else {
      return getDefaultProject().getId();
    }
  }

  // NOTE(review): unlike setDomainId, this does not guard against a null/empty
  // id and will install a Project stub with a null id — confirm callers expect
  // that before "fixing" it.
  @Transient
  @XmlElement(name = "default_project_id")
  public void setDefaultProjectId(String defaultProjectId) {
    Project project = new Project();
    project.setId(defaultProjectId);
    setDefaultProject(project);
  }

  @Transient
  public String getTenantId() {
    return tenantId;
  }

  @JsonView(Views.Advance.class)
  @Transient
  public void setTenantId(String tenantId) {
    this.tenantId = tenantId;
  }

  @JsonView(Views.Advance.class)
  @Transient
  public String getUsername() {
    return username;
  }

  @Transient
  public void setUsername(String username) {
    this.username = username;
  }

  /**
   * Diagnostic representation of this user.
   * FIX: the raw password is no longer included — it previously leaked
   * credentials into any log line that printed this entity.
   */
  @Override
  public String toString() {
    return "User [name=" + name + ", domain=" + domain + ", password=******"
        + ", enabled=" + enabled + ", extra=" + extra + ", defaultProject=" + defaultProject
        + ", userGroupMemberships=" + userGroupMemberships + ", tenantId=" + tenantId
        + ", username=" + username + "]";
  }

  @Column(name = "FIRSTNAME", length = 50, nullable = true)
  public String getFirstName() {
    return firstName;
  }

  public void setFirstName(String firstName) {
    this.firstNameUpdated = true;
    this.firstName = firstName;
  }

  @Column(name = "LASTNAME", length = 50, nullable = true)
  public String getLastName() {
    return lastName;
  }

  public void setLastName(String lastName) {
    this.lastNameUpdated = true;
    this.lastName = lastName;
  }

  @XmlTransient
  @Transient
  public boolean isFirstNameUpdated() {
    return firstNameUpdated;
  }

  public void setFirstNameUpdated(boolean firstNameUpdated) {
    this.firstNameUpdated = firstNameUpdated;
  }

  @XmlTransient
  @Transient
  public boolean isLastNameUpdated() {
    return lastNameUpdated;
  }

  public void setLastNameUpdated(boolean lastNameUpdated) {
    this.lastNameUpdated = lastNameUpdated;
  }

  @Column(name = "ip", length = 50, nullable = true)
  @JsonView(Views.Basic.class)
  public String getIp() {
    return ip;
  }

  public void setIp(String ip) {
    this.ipUpdated = true;
    this.ip = ip;
  }

  @Column(name = "port", nullable = true)
  @JsonView(Views.Basic.class)
  public Integer getPort() {
    return port;
  }

  public void setPort(Integer port) {
    this.portUpdated = true;
    this.port = port;
  }

  @XmlTransient
  @Transient
  public boolean isIpUpdated() {
    return ipUpdated;
  }

  public void setIpUpdated(boolean ipUpdated) {
    this.ipUpdated = ipUpdated;
  }

  @XmlTransient
  @Transient
  public boolean isPortUpdated() {
    return portUpdated;
  }

  public void setPortUpdated(boolean portUpdated) {
    this.portUpdated = portUpdated;
  }
}
/**
 * Copyright 2013 OpenSocial Foundation
 * Copyright 2013 International Business Machines Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Utility library for working with Activity Streams Actions
 * Requires underscorejs.
 *
 * @author James M Snell (jasnell@us.ibm.com)
 */
package com.ibm.common.activitystreams.internal;

import static com.google.gson.internal.bind.TypeAdapters.NUMBER;
import static com.ibm.common.activitystreams.internal.Adapters.DATE;
import static com.ibm.common.activitystreams.internal.Adapters.DATETIME;
import static com.ibm.common.activitystreams.internal.Adapters.NLV;
import static com.ibm.common.activitystreams.internal.Adapters.TABLE;
import static com.ibm.common.activitystreams.internal.Adapters.OPTIONAL;
import static com.ibm.common.activitystreams.internal.Adapters.ACTIONS;
import static com.ibm.common.activitystreams.internal.Adapters.DURATION;
import static com.ibm.common.activitystreams.internal.Adapters.INTERVAL;
import static com.ibm.common.activitystreams.internal.Adapters.ITERABLE;
import static com.ibm.common.activitystreams.internal.Adapters.MIMETYPE;
import static com.ibm.common.activitystreams.internal.Adapters.MULTIMAP;
import static com.ibm.common.activitystreams.internal.Adapters.RANGE;
import static com.ibm.common.activitystreams.internal.Adapters.PERIOD;
import static com.ibm.common.activitystreams.internal.Adapters.forEnum;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Date;

import org.joda.time.DateTime;
import org.joda.time.ReadableDuration;
import org.joda.time.ReadableInterval;
import org.joda.time.ReadablePeriod;

import com.google.common.base.Optional;
import com.google.common.base.Supplier;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;
import com.google.common.collect.Range;
import com.google.common.collect.Table;
import com.google.common.net.MediaType;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.internal.LazilyParsedNumber;
import com.ibm.common.activitystreams.ASObject;
import com.ibm.common.activitystreams.ActionsValue;
import com.ibm.common.activitystreams.Activity;
import com.ibm.common.activitystreams.Collection;
import com.ibm.common.activitystreams.LinkValue;
import com.ibm.common.activitystreams.NLV;
import com.ibm.common.activitystreams.TypeValue;
import com.ibm.common.activitystreams.Writable;

/**
 * Immutable facade over a fully configured {@link Gson} instance that knows
 * how to serialize/deserialize the Activity Streams object model. Instances
 * are built via {@link #make()} / {@link Builder} and are safe to share once
 * constructed (the wrapped Gson is itself thread-safe).
 *
 * @author james
 * @version $Revision: 1.0 $
 */
public final class GsonWrapper {

  /**
   * Creates a new Builder for configuring a GsonWrapper.
   * @return Builder
   */
  public static final Builder make() {
    return new Builder();
  }

  /**
   * Fluent builder for {@link GsonWrapper}. Collects charset, pretty-print
   * flag, an optional {@link Schema}, and extra type adapters.
   *
   * @author james
   * @version $Revision: 1.0 $
   */
  public static final class Builder implements Supplier<GsonWrapper> {

    // Charset used when bridging byte streams to character streams in
    // write(..., OutputStream) and readAs(InputStream, ...).
    private String charset = "UTF-8";
    private boolean pretty;
    private Schema schema = null; // default
    private ImmutableList.Builder<AdapterEntry<?>> adapters =
      ImmutableList.builder();

    /**
     * Sets the charset used for byte-stream I/O (default "UTF-8").
     * @param charset String
     * @return Builder
     */
    public Builder charset(String charset) {
      this.charset = charset;
      return this;
    }

    /**
     * Sets the vocabulary schema; when omitted, Schema.make().get() is used.
     * @param schema Schema
     * @return Builder
     */
    public Builder schema(Schema schema) {
      this.schema = schema;
      return this;
    }

    /**
     * Registers an adapter for an exact type (non-hierarchy registration).
     * @param type Class&lt;? extends T&gt;
     * @param adapter Adapter&lt;T&gt;
     * @return Builder
     */
    public <T>Builder adapter(
      Class<? extends T> type,
      Adapter<T> adapter) {
      return adapter(type,adapter,false);
    }

    /**
     * Registers an adapter for a type; when {@code hier} is true the adapter
     * applies to the whole type hierarchy rather than the exact class.
     * @param type Class&lt;? extends T&gt;
     * @param adapter Adapter&lt;T&gt;
     * @param hier boolean
     * @return Builder
     */
    public <T>Builder adapter(
      Class<? extends T> type,
      Adapter<T> adapter,
      boolean hier) {
      adapters.add(new AdapterEntry<T>(type,adapter,hier));
      return this;
    }

    /**
     * Enables or disables pretty-printed JSON output.
     * @param on boolean
     * @return Builder
     */
    public Builder prettyPrint(boolean on) {
      this.pretty = on;
      return this;
    }

    /**
     * Enables pretty-printed JSON output.
     * @return Builder
     */
    public Builder prettyPrint() {
      return prettyPrint(true);
    }

    /**
     * Builds the configured GsonWrapper.
     * @return GsonWrapper
     * @see com.google.common.base.Supplier#get()
     */
    public GsonWrapper get() {
      return new GsonWrapper(this);
    }

  }

  /**
   * Internal record of a (type, adapter, hierarchy?) registration captured by
   * the Builder and replayed onto the GsonBuilder at construction time.
   *
   * @author james
   * @version $Revision: 1.0 $
   */
  private final static class AdapterEntry<T> {
    private final Class<? extends T> type;
    private final Adapter<T> adapter;
    private final boolean hier;
    /**
     * Constructor for AdapterEntry.
     * @param type Class&lt;? extends T&gt;
     * @param adapter Adapter&lt;T&gt;
     * @param hier boolean
     */
    AdapterEntry(
      Class<? extends T> type,
      Adapter<T> adapter,
      boolean hier) {
      this.type = type;
      this.adapter = adapter;
      this.hier = hier;
    }
  }

  private final Gson gson;
  private final String charset;

  /**
   * Constructor for GsonWrapper. Resolves the schema (falling back to the
   * default Schema.make().get()), builds the Gson instance, and snapshots the
   * charset from the builder.
   * @param builder Builder
   */
  protected GsonWrapper(Builder builder) {
    Schema schema =
      builder.schema != null ?
        builder.schema :
        Schema.make().get();
    ASObjectAdapter base =
      new ASObjectAdapter(schema);
    GsonBuilder b = initGsonBuilder(
      builder,
      schema,
      base,
      builder.adapters.build());
    if (builder.pretty)
      b.setPrettyPrinting();
    this.gson = b.create();
    this.charset = builder.charset;
  }

  /**
   * Builds the GsonBuilder with every adapter required by the Activity
   * Streams model, then replays the caller-supplied AdapterEntry list.
   *
   * NOTE(review): the registration sequence below is presumably significant
   * for overlapping hierarchies (e.g. Activity/Collection vs. ASObject all
   * map to {@code base}) — preserve the order when modifying. Custom entries
   * with a null adapter deliberately fall back to {@code base}.
   *
   * @param builder Builder
   * @return GsonBuilder
   */
  private static GsonBuilder initGsonBuilder(
    Builder builder,
    Schema schema,
    ASObjectAdapter base,
    Iterable<AdapterEntry<?>> adapters) {

    GsonBuilder gson = new GsonBuilder()
      .registerTypeHierarchyAdapter(TypeValue.class, new TypeValueAdapter(schema))
      .registerTypeHierarchyAdapter(LinkValue.class, new LinkValueAdapter(schema))
      .registerTypeHierarchyAdapter(Iterable.class, ITERABLE)
      .registerTypeHierarchyAdapter(ASObject.class, base)
      .registerTypeHierarchyAdapter(Collection.class, base)
      .registerTypeHierarchyAdapter(Activity.class, base)
      .registerTypeHierarchyAdapter(NLV.class, NLV)
      .registerTypeHierarchyAdapter(ActionsValue.class, ACTIONS)
      .registerTypeHierarchyAdapter(Optional.class, OPTIONAL)
      .registerTypeHierarchyAdapter(Range.class, RANGE)
      .registerTypeHierarchyAdapter(Table.class, TABLE)
      .registerTypeHierarchyAdapter(LazilyParsedNumber.class, NUMBER)
      .registerTypeHierarchyAdapter(LazilyParsedNumberComparable.class, NUMBER)
      .registerTypeHierarchyAdapter(ReadableDuration.class, DURATION)
      .registerTypeHierarchyAdapter(ReadablePeriod.class, PERIOD)
      .registerTypeHierarchyAdapter(ReadableInterval.class, INTERVAL)
      .registerTypeAdapter(
        Activity.Status.class,
        forEnum(
          Activity.Status.class,
          Activity.Status.OTHER))
      .registerTypeAdapter(Date.class, DATE)
      .registerTypeAdapter(DateTime.class, DATETIME)
      .registerTypeAdapter(MediaType.class, MIMETYPE)
      .registerTypeHierarchyAdapter(Multimap.class, MULTIMAP);

    for (AdapterEntry<?> entry : adapters) {
      if (entry.hier)
        gson.registerTypeHierarchyAdapter(
          entry.type,
          entry.adapter!=null ?
            entry.adapter : base);
      else
        gson.registerTypeAdapter(
          entry.type,
          entry.adapter!=null ?
            entry.adapter:base);
    }

    return gson;
  }

  /**
   * Serializes {@code w} to the given byte stream using the configured
   * charset and flushes; the stream itself is NOT closed. Any failure is
   * rethrown unchecked via Throwables.propagate.
   * @param w Writable
   * @param out OutputStream
   */
  public void write(Writable w, OutputStream out) {
    try {
      OutputStreamWriter wout =
        new OutputStreamWriter(out, charset);
      gson.toJson(w,wout);
      wout.flush();
    } catch (Throwable t) {
      throw Throwables.propagate(t);
    }
  }

  /**
   * Serializes {@code w} to the given character stream (no flush/close here).
   * @param w Writable
   * @param out Writer
   */
  public void write(Writable w, Writer out) {
    gson.toJson(w,out);
  }

  /**
   * Serializes {@code w} and returns the JSON as a String.
   * @param w Writable
   * @return String
   */
  public String write(Writable w) {
    StringWriter sw =
      new StringWriter();
    write(w,sw);
    return sw.toString();
  }

  /**
   * Deserializes the byte stream (decoded with the configured charset) as an
   * instance of {@code type}. Failures are rethrown unchecked.
   * @param in InputStream
   * @param type Class&lt;? extends A&gt;
   * @return A
   */
  public <A extends ASObject>A readAs(InputStream in, Class<? extends A> type) {
    try {
      return readAs(new InputStreamReader(in, charset), type);
    } catch (Throwable t) {
      throw Throwables.propagate(t);
    }
  }

  /**
   * Deserializes the character stream as an instance of {@code type}.
   * @param in Reader
   * @param type Class&lt;? extends A&gt;
   * @return A
   */
  public <A extends ASObject>A readAs(Reader in, Class<? extends A> type) {
    return (A)gson.fromJson(in, type);
  }
}
/*
 * Copyright 2012-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.build;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.util.Collections;
import java.util.Map;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

import org.gradle.testkit.runner.BuildResult;
import org.gradle.testkit.runner.GradleRunner;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import org.springframework.util.FileCopyUtils;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests for {@link ConventionsPlugin}.
 *
 * @author Christoph Dreis
 */
class ConventionsPluginTests {

	private File projectDir;

	private File buildFile;

	/**
	 * Creates a fresh project layout per test: a settings file that includes the
	 * spring-boot-parent platform project (required by the conventions plugin) and a
	 * minimal java-platform build for it.
	 */
	@BeforeEach
	void setup(@TempDir File projectDir) throws IOException {
		this.projectDir = projectDir;
		this.buildFile = new File(this.projectDir, "build.gradle");
		File settingsFile = new File(this.projectDir, "settings.gradle");
		try (PrintWriter out = new PrintWriter(new FileWriter(settingsFile))) {
			out.println("include ':spring-boot-project:spring-boot-parent'");
		}
		File springBootParent = new File(this.projectDir, "spring-boot-project/spring-boot-parent/build.gradle");
		springBootParent.getParentFile().mkdirs();
		try (PrintWriter out = new PrintWriter(new FileWriter(springBootParent))) {
			out.println("plugins {");
			out.println(" id 'java-platform'");
			out.println("}");
		}
	}

	@Test
	void jarIncludesLegalFiles() throws IOException {
		try (PrintWriter out = new PrintWriter(new FileWriter(this.buildFile))) {
			out.println("plugins {");
			out.println(" id 'java'");
			out.println(" id 'org.springframework.boot.conventions'");
			out.println("}");
			out.println("version = '1.2.3'");
			out.println("sourceCompatibility = '17'");
			out.println("description 'Test project for manifest customization'");
			out.println("jar.archiveFileName = 'test.jar'");
		}
		runGradle("jar");
		File file = new File(this.projectDir, "/build/libs/test.jar");
		assertThat(file).exists();
		try (JarFile jar = new JarFile(file)) {
			assertThatLicenseIsPresent(jar);
			assertThatNoticeIsPresent(jar);
			Attributes mainAttributes = jar.getManifest().getMainAttributes();
			assertThat(mainAttributes.getValue("Implementation-Title"))
					.isEqualTo("Test project for manifest customization");
			assertThat(mainAttributes.getValue("Automatic-Module-Name"))
					.isEqualTo(this.projectDir.getName().replace("-", "."));
			assertThat(mainAttributes.getValue("Implementation-Version")).isEqualTo("1.2.3");
			assertThat(mainAttributes.getValue("Built-By")).isEqualTo("Spring");
			assertThat(mainAttributes.getValue("Build-Jdk-Spec")).isEqualTo("17");
		}
	}

	@Test
	void sourceJarIsBuilt() throws IOException {
		try (PrintWriter out = new PrintWriter(new FileWriter(this.buildFile))) {
			out.println("plugins {");
			out.println(" id 'java'");
			out.println(" id 'maven-publish'");
			out.println(" id 'org.springframework.boot.conventions'");
			out.println("}");
			out.println("version = '1.2.3'");
			out.println("sourceCompatibility = '17'");
			out.println("description 'Test'");
		}
		runGradle("assemble");
		File file = new File(this.projectDir, "/build/libs/" + this.projectDir.getName() + "-1.2.3-sources.jar");
		assertThat(file).exists();
		try (JarFile jar = new JarFile(file)) {
			assertThatLicenseIsPresent(jar);
			assertThatNoticeIsPresent(jar);
			Attributes mainAttributes = jar.getManifest().getMainAttributes();
			assertThat(mainAttributes.getValue("Implementation-Title"))
					.isEqualTo("Source for " + this.projectDir.getName());
			assertThat(mainAttributes.getValue("Automatic-Module-Name"))
					.isEqualTo(this.projectDir.getName().replace("-", "."));
			assertThat(mainAttributes.getValue("Implementation-Version")).isEqualTo("1.2.3");
			assertThat(mainAttributes.getValue("Built-By")).isEqualTo("Spring");
			assertThat(mainAttributes.getValue("Build-Jdk-Spec")).isEqualTo("17");
		}
	}

	@Test
	void javadocJarIsBuilt() throws IOException {
		try (PrintWriter out = new PrintWriter(new FileWriter(this.buildFile))) {
			out.println("plugins {");
			out.println(" id 'java'");
			out.println(" id 'maven-publish'");
			out.println(" id 'org.springframework.boot.conventions'");
			out.println("}");
			out.println("version = '1.2.3'");
			out.println("sourceCompatibility = '17'");
			out.println("description 'Test'");
		}
		runGradle("assemble");
		File file = new File(this.projectDir, "/build/libs/" + this.projectDir.getName() + "-1.2.3-javadoc.jar");
		assertThat(file).exists();
		try (JarFile jar = new JarFile(file)) {
			assertThatLicenseIsPresent(jar);
			assertThatNoticeIsPresent(jar);
			Attributes mainAttributes = jar.getManifest().getMainAttributes();
			assertThat(mainAttributes.getValue("Implementation-Title"))
					.isEqualTo("Javadoc for " + this.projectDir.getName());
			assertThat(mainAttributes.getValue("Automatic-Module-Name"))
					.isEqualTo(this.projectDir.getName().replace("-", "."));
			assertThat(mainAttributes.getValue("Implementation-Version")).isEqualTo("1.2.3");
			assertThat(mainAttributes.getValue("Built-By")).isEqualTo("Spring");
			assertThat(mainAttributes.getValue("Build-Jdk-Spec")).isEqualTo("17");
		}
	}

	private void assertThatLicenseIsPresent(JarFile jar) {
		JarEntry license = jar.getJarEntry("META-INF/LICENSE.txt");
		assertThat(license).isNotNull();
	}

	private void assertThatNoticeIsPresent(JarFile jar) throws IOException {
		JarEntry notice = jar.getJarEntry("META-INF/NOTICE.txt");
		assertThat(notice).isNotNull();
		String noticeContent = FileCopyUtils.copyToString(new InputStreamReader(jar.getInputStream(notice)));
		// Test that variables were replaced
		assertThat(noticeContent).doesNotContain("${");
	}

	/**
	 * Writes the build script shared by the test-retry configuration tests: a
	 * {@code retryConfig} task that prints whether the retry plugin was applied and
	 * the effective retry settings. Previously duplicated verbatim in both tests.
	 */
	private void writeRetryConfigBuildFile() throws IOException {
		try (PrintWriter out = new PrintWriter(new FileWriter(this.buildFile))) {
			out.println("plugins {");
			out.println(" id 'java'");
			out.println(" id 'org.springframework.boot.conventions'");
			out.println("}");
			out.println("description 'Test'");
			out.println("task retryConfig {");
			out.println(" doLast {");
			out.println(" println \"Retry plugin applied: ${plugins.hasPlugin('org.gradle.test-retry')}\"");
			out.println(" test.retry {");
			out.println(" println \"maxRetries: ${maxRetries.get()}\"");
			out.println(" println \"failOnPassedAfterRetry: ${failOnPassedAfterRetry.get()}\"");
			out.println(" }");
			out.println(" }");
			out.println("}");
		}
	}

	@Test
	void testRetryIsConfiguredWithThreeRetriesOnCI() throws IOException {
		writeRetryConfigBuildFile();
		assertThat(runGradle(Collections.singletonMap("CI", "true"), "retryConfig", "--stacktrace").getOutput())
				.contains("Retry plugin applied: true").contains("maxRetries: 3")
				.contains("failOnPassedAfterRetry: true");
	}

	@Test
	void testRetryIsConfiguredWithZeroRetriesLocally() throws IOException {
		writeRetryConfigBuildFile();
		assertThat(runGradle(Collections.singletonMap("CI", "local"), "retryConfig", "--stacktrace").getOutput())
				.contains("Retry plugin applied: true").contains("maxRetries: 0")
				.contains("failOnPassedAfterRetry: true");
	}

	private BuildResult runGradle(String... args) {
		return runGradle(Collections.emptyMap(), args);
	}

	private BuildResult runGradle(Map<String, String> environment, String... args) {
		return GradleRunner.create().withProjectDir(this.projectDir).withEnvironment(environment).withArguments(args)
				.withPluginClasspath().build();
	}

}
/*
 * Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/* @test
 * @bug 6866804 7006126 8028270
 * @summary Unit test for java.nio.file.Files
 * @library ..
 * @build CheckPermissions
 * @run main/othervm CheckPermissions
 */

import java.nio.ByteBuffer;
import java.nio.file.*;
import static java.nio.file.Files.*;
import static java.nio.file.StandardOpenOption.*;
import java.nio.file.attribute.*;
import java.nio.channels.SeekableByteChannel;
import java.security.Permission;
import java.io.*;
import java.util.*;

/**
 * Checks each method that accesses the file system does the right permission
 * check when there is a security manager set.
 */
public class CheckPermissions {

    // Per-thread record of every security check observed since the last
    // prepare() call; populated by LoggingSecurityManager below.
    static class Checks {
        private List<Permission> permissionsChecked = new ArrayList<>();
        private Set<String> propertiesChecked = new HashSet<>();
        private List<String> readsChecked = new ArrayList<>();
        private List<String> writesChecked = new ArrayList<>();
        private List<String> deletesChecked = new ArrayList<>();
        private List<String> execsChecked = new ArrayList<>();

        List<Permission> permissionsChecked() { return permissionsChecked; }
        Set<String> propertiesChecked() { return propertiesChecked; }
        List<String> readsChecked() { return readsChecked; }
        List<String> writesChecked() { return writesChecked; }
        List<String> deletesChecked() { return deletesChecked; }
        List<String> execsChecked() { return execsChecked; }
    }

    // Null until prepare() is first called on the thread; the logging
    // security manager records checks only when a Checks object is present.
    static ThreadLocal<Checks> myChecks =
        new ThreadLocal<Checks>() {
            @Override protected Checks initialValue() {
                return null;
            }
        };

    // Resets the recorded checks; call immediately before the operation
    // under test, then use the assertCheck* helpers afterwards.
    static void prepare() {
        myChecks.set(new Checks());
    }

    // Asserts the exact Permission was checked since the last prepare().
    static void assertCheckPermission(Permission expected) {
        if (!myChecks.get().permissionsChecked().contains(expected))
            throw new RuntimeException(expected + " not checked");
    }

    // Asserts a property-access check for the given key was made.
    static void assertCheckPropertyAccess(String key) {
        if (!myChecks.get().propertiesChecked().contains(key))
            throw new RuntimeException("Property " + key + " not checked");
    }

    // Asserts some recorded path string ends with the given file's path
    // (suffix match, since the recorded strings may be absolute).
    static void assertChecked(Path file, List<String> list) {
        String s = file.toString();
        for (String f: list) {
            if (f.endsWith(s))
                return;
        }
        throw new RuntimeException("Access not checked");
    }

    static void assertCheckRead(Path file) {
        assertChecked(file, myChecks.get().readsChecked());
    }

    static void assertCheckWrite(Path file) {
        assertChecked(file, myChecks.get().writesChecked());
    }

    // Prefix match: a write anywhere under the directory satisfies this
    // (used for createTempFile/createTempDirectory, where the entry name
    // is generated and unknown in advance).
    static void assertCheckWriteToDirectory(Path dir) {
        String s = dir.toString();
        List<String> list = myChecks.get().writesChecked();
        for (String f: list) {
            if (f.startsWith(s)) {
                return;
            }
        }
        throw new RuntimeException("Access not checked");
    }

    static void assertCheckDelete(Path file) {
        assertChecked(file, myChecks.get().deletesChecked());
    }

    static void assertCheckExec(Path file) {
        assertChecked(file, myChecks.get().execsChecked());
    }

    // SecurityManager that permits everything but logs each check into the
    // current thread's Checks record (if prepare() has been called).
    static class LoggingSecurityManager extends SecurityManager {
        static void install() {
            System.setSecurityManager(new LoggingSecurityManager());
        }

        @Override
        public void checkPermission(Permission perm) {
            Checks checks = myChecks.get();
            if (checks != null)
                checks.permissionsChecked().add(perm);
        }

        @Override
        public void checkPropertyAccess(String key) {
            Checks checks = myChecks.get();
            if (checks != null)
                checks.propertiesChecked().add(key);
        }

        @Override
        public void checkRead(String file) {
            Checks checks = myChecks.get();
            if (checks != null)
                checks.readsChecked().add(file);
        }

        @Override
        public void checkWrite(String file) {
            Checks checks = myChecks.get();
            if (checks != null)
                checks.writesChecked().add(file);
        }

        @Override
        public void checkDelete(String file) {
            Checks checks = myChecks.get();
            if (checks != null)
                checks.deletesChecked().add(file);
        }

        @Override
        public void checkExec(String file) {
            Checks checks = myChecks.get();
            if (checks != null)
                checks.execsChecked().add(file);
        }
    }

    // Verifies read/write checks for the basic attribute view operations.
    static void testBasicFileAttributeView(BasicFileAttributeView view, Path file)
        throws IOException
    {
        prepare();
        view.readAttributes();
        assertCheckRead(file);

        prepare();
        FileTime now = FileTime.fromMillis(System.currentTimeMillis());
        view.setTimes(null, now, now);
        assertCheckWrite(file);
    }

    // Verifies that POSIX attribute operations additionally require the
    // "accessUserInformation" runtime permission.
    static void testPosixFileAttributeView(PosixFileAttributeView view, Path file)
        throws IOException
    {
        prepare();
        PosixFileAttributes attrs = view.readAttributes();
        assertCheckRead(file);
        assertCheckPermission(new RuntimePermission("accessUserInformation"));

        prepare();
        view.setPermissions(attrs.permissions());
        assertCheckWrite(file);
        assertCheckPermission(new RuntimePermission("accessUserInformation"));

        prepare();
        view.setOwner(attrs.owner());
        assertCheckWrite(file);
        assertCheckPermission(new RuntimePermission("accessUserInformation"));

        prepare();
        view.setOwner(attrs.owner());
        assertCheckWrite(file);
        assertCheckPermission(new RuntimePermission("accessUserInformation"));
    }

    public static void main(String[] args) throws IOException {
        final Path testdir = Paths.get(System.getProperty("test.dir", ".")).toAbsolutePath();
        final Path tmpdir = Paths.get(System.getProperty("java.io.tmpdir"));
        Path file = createFile(testdir.resolve("file1234"));
        try {
            LoggingSecurityManager.install();

            // -- check access --
            prepare();
            exists(file);
            assertCheckRead(file);

            prepare();
            isReadable(file);
            assertCheckRead(file);

            prepare();
            isWritable(file);
            assertCheckWrite(file);

            prepare();
            isExecutable(file);
            assertCheckExec(file);

            // -- copy --
            Path target = testdir.resolve("target1234");
            prepare();
            copy(file, target);
            try {
                assertCheckRead(file);
                assertCheckWrite(target);
            } finally {
                delete(target);
            }

            if (TestUtil.supportsLinks(testdir)) {
                Path link = testdir.resolve("link1234");
                createSymbolicLink(link, file);
                try {
                    prepare();
                    copy(link, target, LinkOption.NOFOLLOW_LINKS);
                    try {
                        assertCheckRead(link);
                        assertCheckWrite(target);
                        assertCheckPermission(new LinkPermission("symbolic"));
                    } finally {
                        delete(target);
                    }
                    prepare();
                    readSymbolicLink(link);
                    assertCheckPermission(new FilePermission(link.toString(), "readlink"));
                } finally {
                    delete(link);
                }
            }

            // -- createDirectory --
            Path subdir = testdir.resolve("subdir1234");
            prepare();
            createDirectory(subdir);
            try {
                assertCheckWrite(subdir);
            } finally {
                delete(subdir);
            }

            // -- createFile --
            Path fileToCreate = testdir.resolve("file7890");
            prepare();
            createFile(fileToCreate);
            try {
                assertCheckWrite(fileToCreate);
            } finally {
                delete(fileToCreate);
            }

            // -- createSymbolicLink --
            if (TestUtil.supportsLinks(testdir)) {
                prepare();
                Path link = testdir.resolve("link1234");
                createSymbolicLink(link, file);
                try {
                    assertCheckWrite(link);
                    assertCheckPermission(new LinkPermission("symbolic"));
                } finally {
                    delete(link);
                }
            }

            // -- createLink --
            if (TestUtil.supportsLinks(testdir)) {
                prepare();
                Path link = testdir.resolve("entry234");
                createLink(link, file);
                try {
                    assertCheckWrite(link);
                    assertCheckPermission(new LinkPermission("hard"));
                } finally {
                    delete(link);
                }
            }

            // -- createTempFile --
            prepare();
            Path tmpfile1 = createTempFile("foo", null);
            try {
                assertCheckWriteToDirectory(tmpdir);
            } finally {
                delete(tmpfile1);
            }
            prepare();
            Path tmpfile2 = createTempFile(testdir, "foo", ".tmp");
            try {
                assertCheckWriteToDirectory(testdir);
            } finally {
                delete(tmpfile2);
            }

            // -- createTempDirectory --
            prepare();
            Path tmpdir1 = createTempDirectory("foo");
            try {
                assertCheckWriteToDirectory(tmpdir);
            } finally {
                delete(tmpdir1);
            }
            prepare();
            Path tmpdir2 = createTempDirectory(testdir, "foo");
            try {
                assertCheckWriteToDirectory(testdir);
            } finally {
                delete(tmpdir2);
            }

            // -- delete/deleteIfExists --
            Path fileToDelete = testdir.resolve("file7890");

            createFile(fileToDelete);
            prepare();
            delete(fileToDelete);
            assertCheckDelete(fileToDelete);

            createFile(fileToDelete);
            prepare();
            deleteIfExists(fileToDelete);   // file exists
            assertCheckDelete(fileToDelete);

            prepare();
            deleteIfExists(fileToDelete);   // file does not exist
            assertCheckDelete(fileToDelete);

            // -- exists/notExists --
            prepare();
            exists(file);
            assertCheckRead(file);

            prepare();
            notExists(file);
            assertCheckRead(file);

            // -- getFileStore --
            prepare();
            getFileStore(file);
            assertCheckRead(file);
            assertCheckPermission(new RuntimePermission("getFileStoreAttributes"));

            // -- isSameFile --
            prepare();
            isSameFile(file, testdir);
            assertCheckRead(file);
            assertCheckRead(testdir);

            // -- move --
            Path target2 = testdir.resolve("target1234");
            prepare();
            move(file, target2);
            try {
                assertCheckWrite(file);
                assertCheckWrite(target2);
            } finally {
                // restore file
                move(target2, file);
            }

            // -- newByteChannel --
            prepare();
            try (SeekableByteChannel sbc = newByteChannel(file)) {
                assertCheckRead(file);
            }
            prepare();
            try (SeekableByteChannel sbc = newByteChannel(file, WRITE)) {
                assertCheckWrite(file);
            }
            prepare();
            try (SeekableByteChannel sbc = newByteChannel(file, READ, WRITE)) {
                assertCheckRead(file);
                assertCheckWrite(file);
            }
            prepare();
            try (SeekableByteChannel sbc = newByteChannel(file, DELETE_ON_CLOSE)) {
                assertCheckRead(file);
                assertCheckDelete(file);
            }
            createFile(file);  // restore file

            // -- newInputStream/newOutputStream --
            prepare();
            try (InputStream in = newInputStream(file)) {
                assertCheckRead(file);
            }
            prepare();
            try (OutputStream out = newOutputStream(file)) {
                assertCheckWrite(file);
            }

            // -- newDirectoryStream --
            prepare();
            try (DirectoryStream<Path> stream = newDirectoryStream(testdir)) {
                assertCheckRead(testdir);

                if (stream instanceof SecureDirectoryStream<?>) {
                    Path entry;
                    SecureDirectoryStream<Path> sds =
                        (SecureDirectoryStream<Path>)stream;

                    // newByteChannel
                    entry = file.getFileName();
                    prepare();
                    try (SeekableByteChannel sbc =
                        sds.newByteChannel(entry, EnumSet.of(READ))) {
                        assertCheckRead(file);
                    }
                    prepare();
                    try (SeekableByteChannel sbc =
                        sds.newByteChannel(entry, EnumSet.of(WRITE))) {
                        assertCheckWrite(file);
                    }

                    // deleteFile
                    entry = file.getFileName();
                    prepare();
                    sds.deleteFile(entry);
                    assertCheckDelete(file);
                    createFile(testdir.resolve(entry));  // restore file

                    // deleteDirectory
                    entry = Paths.get("subdir1234");
                    createDirectory(testdir.resolve(entry));
                    prepare();
                    sds.deleteDirectory(entry);
                    assertCheckDelete(testdir.resolve(entry));

                    // move
                    entry = Paths.get("tempname1234");
                    prepare();
                    sds.move(file.getFileName(), sds, entry);
                    assertCheckWrite(file);
                    assertCheckWrite(testdir.resolve(entry));
                    sds.move(entry, sds, file.getFileName());  // restore file

                    // newDirectoryStream
                    entry = Paths.get("subdir1234");
                    createDirectory(testdir.resolve(entry));
                    try {
                        prepare();
                        sds.newDirectoryStream(entry).close();
                        assertCheckRead(testdir.resolve(entry));
                    } finally {
                        delete(testdir.resolve(entry));
                    }

                    // getFileAttributeView to access attributes of directory
                    testBasicFileAttributeView(sds
                        .getFileAttributeView(BasicFileAttributeView.class), testdir);
                    testPosixFileAttributeView(sds
                        .getFileAttributeView(PosixFileAttributeView.class), testdir);
// getFileAttributeView to access attributes of entry entry = file.getFileName(); testBasicFileAttributeView(sds .getFileAttributeView(entry, BasicFileAttributeView.class), file); testPosixFileAttributeView(sds .getFileAttributeView(entry, PosixFileAttributeView.class), file); } else { System.out.println("SecureDirectoryStream not tested"); } } // -- toAbsolutePath -- prepare(); file.getFileName().toAbsolutePath(); assertCheckPropertyAccess("user.dir"); // -- toRealPath -- prepare(); file.toRealPath(); assertCheckRead(file); prepare(); file.toRealPath(LinkOption.NOFOLLOW_LINKS); assertCheckRead(file); prepare(); Paths.get(".").toRealPath(); assertCheckPropertyAccess("user.dir"); prepare(); Paths.get(".").toRealPath(LinkOption.NOFOLLOW_LINKS); assertCheckPropertyAccess("user.dir"); // -- register -- try (WatchService watcher = FileSystems.getDefault().newWatchService()) { prepare(); testdir.register(watcher, StandardWatchEventKinds.ENTRY_DELETE); assertCheckRead(testdir); } // -- getAttribute/setAttribute/readAttributes -- prepare(); getAttribute(file, "size"); assertCheckRead(file); prepare(); setAttribute(file, "lastModifiedTime", FileTime.fromMillis(System.currentTimeMillis())); assertCheckWrite(file); prepare(); readAttributes(file, "*"); assertCheckRead(file); // -- BasicFileAttributeView -- testBasicFileAttributeView( getFileAttributeView(file, BasicFileAttributeView.class), file); // -- PosixFileAttributeView -- { PosixFileAttributeView view = getFileAttributeView(file, PosixFileAttributeView.class); if (view != null && getFileStore(file).supportsFileAttributeView(PosixFileAttributeView.class)) { testPosixFileAttributeView(view, file); } else { System.out.println("PosixFileAttributeView not tested"); } } // -- DosFileAttributeView -- { DosFileAttributeView view = getFileAttributeView(file, DosFileAttributeView.class); if (view != null && getFileStore(file).supportsFileAttributeView(DosFileAttributeView.class)) { prepare(); view.readAttributes(); 
assertCheckRead(file); prepare(); view.setArchive(false); assertCheckWrite(file); prepare(); view.setHidden(false); assertCheckWrite(file); prepare(); view.setReadOnly(false); assertCheckWrite(file); prepare(); view.setSystem(false); assertCheckWrite(file); } else { System.out.println("DosFileAttributeView not tested"); } } // -- FileOwnerAttributeView -- { FileOwnerAttributeView view = getFileAttributeView(file, FileOwnerAttributeView.class); if (view != null && getFileStore(file).supportsFileAttributeView(FileOwnerAttributeView.class)) { prepare(); UserPrincipal owner = view.getOwner(); assertCheckRead(file); assertCheckPermission(new RuntimePermission("accessUserInformation")); prepare(); view.setOwner(owner); assertCheckWrite(file); assertCheckPermission(new RuntimePermission("accessUserInformation")); } else { System.out.println("FileOwnerAttributeView not tested"); } } // -- UserDefinedFileAttributeView -- { UserDefinedFileAttributeView view = getFileAttributeView(file, UserDefinedFileAttributeView.class); if (view != null && getFileStore(file).supportsFileAttributeView(UserDefinedFileAttributeView.class)) { prepare(); view.write("test", ByteBuffer.wrap(new byte[100])); assertCheckWrite(file); assertCheckPermission(new RuntimePermission("accessUserDefinedAttributes")); prepare(); view.read("test", ByteBuffer.allocate(100)); assertCheckRead(file); assertCheckPermission(new RuntimePermission("accessUserDefinedAttributes")); prepare(); view.size("test"); assertCheckRead(file); assertCheckPermission(new RuntimePermission("accessUserDefinedAttributes")); prepare(); view.list(); assertCheckRead(file); assertCheckPermission(new RuntimePermission("accessUserDefinedAttributes")); prepare(); view.delete("test"); assertCheckWrite(file); assertCheckPermission(new RuntimePermission("accessUserDefinedAttributes")); } else { System.out.println("UserDefinedFileAttributeView not tested"); } } // -- AclFileAttributeView -- { AclFileAttributeView view = 
getFileAttributeView(file, AclFileAttributeView.class); if (view != null && getFileStore(file).supportsFileAttributeView(AclFileAttributeView.class)) { prepare(); List<AclEntry> acl = view.getAcl(); assertCheckRead(file); assertCheckPermission(new RuntimePermission("accessUserInformation")); prepare(); view.setAcl(acl); assertCheckWrite(file); assertCheckPermission(new RuntimePermission("accessUserInformation")); } else { System.out.println("AclFileAttributeView not tested"); } } // -- UserPrincipalLookupService UserPrincipalLookupService lookupService = FileSystems.getDefault().getUserPrincipalLookupService(); UserPrincipal owner = getOwner(file); prepare(); lookupService.lookupPrincipalByName(owner.getName()); assertCheckPermission(new RuntimePermission("lookupUserInformation")); try { UserPrincipal group = readAttributes(file, PosixFileAttributes.class).group(); prepare(); lookupService.lookupPrincipalByGroupName(group.getName()); assertCheckPermission(new RuntimePermission("lookupUserInformation")); } catch (UnsupportedOperationException ignore) { System.out.println("lookupPrincipalByGroupName not tested"); } } finally { deleteIfExists(file); } } }
/*
 * Copyright 2012 SURFnet bv, The Netherlands
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package nl.surfnet.coin.teams.service.impl;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import nl.surfnet.coin.api.client.domain.Group20;
import nl.surfnet.coin.api.client.domain.Group20Entry;
import nl.surfnet.coin.api.client.domain.GroupMembersEntry;
import nl.surfnet.coin.api.client.domain.Person;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Transformer;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowCallbackHandler;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.util.Assert;

/**
 * JDBC-backed implementation of {@code ApiGrouperDao} that reads team (group)
 * and membership data directly from the Grouper database and maps rows into
 * OpenSocial 2.0 domain objects ({@link Group20}, {@link Person}).
 * <p>
 * SQL constants (e.g. {@code SQL_FIND_TEAM_BY_MEMBER_AND_BY_GROUPNAME}) are
 * inherited from {@code AbstractGrouperDaoImpl}; this class only adds the
 * query execution and role decoration.
 */
public class ApiGrouperDaoImpl extends AbstractGrouperDaoImpl implements ApiGrouperDao {

  private static final Logger LOG = LoggerFactory.getLogger(ApiGrouperDaoImpl.class);

  private JdbcTemplate jdbcTemplate;

  /*
   * Used for queries with an SQL IN (...) clause, which plain JdbcTemplate
   * cannot express. See:
   * http://static.springsource.org/spring/docs/2.5.x/reference/jdbc.html#jdbc-in
   * -clause
   */
  private NamedParameterJdbcTemplate namedParameterJdbcTemplate;

  // Whitelist mapping from externally supplied sortBy keys to the actual
  // database column names. Anything outside this map is rejected (see
  // formatAllTeamsSQLWithSortByOption), which also prevents SQL injection
  // through the sortBy parameter.
  private static final Map<String, String> VALID_SORTS_FOR_TEAM_QUERY;

  static {
    VALID_SORTS_FOR_TEAM_QUERY = new HashMap<String, String>();
    VALID_SORTS_FOR_TEAM_QUERY.put("id", "name");
    VALID_SORTS_FOR_TEAM_QUERY.put("title", "display_name");
    VALID_SORTS_FOR_TEAM_QUERY.put("description", "description");
  }

  /**
   * Finds the single group with the given name that the person is a member of,
   * decorated with the person's VOOT membership role.
   *
   * @param personId  id of the person (must not be null)
   * @param groupName name of the group (must not be null); upper-cased before
   *                  querying, so lookups are effectively case-insensitive
   * @return a {@link Group20Entry} with one entry, or an empty entry when no
   *         matching row exists
   */
  public Group20Entry findGroup20(String personId, String groupName) {
    Group20Entry group20Entry;
    Assert.notNull(personId, "The personId can not be null");
    Assert.notNull(groupName, "The groupName can not be null");
    try {
      LOG.debug("Query to grouper database for person '{}' in group '{}'", personId, groupName.toUpperCase());
      group20Entry = new Group20Entry(Arrays.asList(jdbcTemplate.queryForObject(
          SQL_FIND_TEAM_BY_MEMBER_AND_BY_GROUPNAME, new Object[] { personId, groupName.toUpperCase() },
          new OpenSocial20GroupRowMapper())));
      addRolesToGroups(personId, group20Entry.getEntry());
    } catch (EmptyResultDataAccessException ignored) {
      // No such group for this person: return an empty entry rather than fail.
      group20Entry = new Group20Entry();
    }
    return group20Entry;
  }

  /**
   * Returns a page of all groups the person is a member of, sorted and
   * decorated with the person's role per group.
   *
   * @param personId id of the person
   * @param offset   zero-based row offset (normalized by correctOffset)
   * @param pageSize page size (normalized by correctPageSize)
   * @param sortBy   one of the keys in VALID_SORTS_FOR_TEAM_QUERY, or blank
   * @return a {@link Group20Entry} containing the page plus the total row count
   */
  public Group20Entry findAllGroup20sByMember(String personId, Integer offset, Integer pageSize, String sortBy) {
    int rowCount = this.jdbcTemplate.queryForInt(SQL_FIND_ALL_TEAMS_BY_MEMBER_ROWCOUNT, personId);
    List<Group20> groups = new ArrayList<Group20>();
    pageSize = correctPageSize(pageSize);
    offset = correctOffset(offset);
    try {
      String sql = formatAllTeamsSQLWithSortByOption(sortBy);
      groups = jdbcTemplate.query(sql, new Object[] { personId, pageSize, offset }, new OpenSocial20GroupRowMapper());
      addRolesToGroups(personId, groups);
    } catch (EmptyResultDataAccessException e) {
      // Person has no teams: deliberately fall through with the empty list.
    }
    return new Group20Entry(groups, pageSize, offset, sortBy, rowCount);
  }

  /**
   * Fills the ORDER BY column placeholder of
   * {@code SQL_FIND_ALL_TEAMS_BY_MEMBER_SORTED}. Blank sortBy defaults to the
   * "name" column; any value not present in VALID_SORTS_FOR_TEAM_QUERY is
   * rejected with an IllegalArgumentException (via Assert.isTrue).
   */
  protected String formatAllTeamsSQLWithSortByOption(String sortBy) {
    String sql = SQL_FIND_ALL_TEAMS_BY_MEMBER_SORTED;
    if (!StringUtils.isBlank(sortBy)) {
      String sortByColumn = null;
      Set<Entry<String, String>> entrySet = VALID_SORTS_FOR_TEAM_QUERY.entrySet();
      for (Entry<String, String> entry : entrySet) {
        if (entry.getKey().equals(sortBy)) {
          sortByColumn = entry.getValue();
          break;
        }
      }
      Assert.isTrue(!StringUtils.isBlank(sortByColumn), "The only supported sortBy options are ("
          + VALID_SORTS_FOR_TEAM_QUERY.keySet() + "). Not allowed is '" + sortBy + "'");
      sql = String.format(sql, sortByColumn);
    } else {
      sql = String.format(sql, "name");
    }
    return sql;
  }

  /**
   * Maps a (id, name, description) row to a {@link Group20}; the generic
   * row-extraction logic lives in {@code GrouperRowMapper}.
   */
  public static class OpenSocial20GroupRowMapper extends GrouperRowMapper<Group20> {
    @Override
    public Group20 createObj(String id, String name, String description) {
      return new Group20(id, name, description);
    }
  }

  // VOOT membership roles, from most to least privileged. The lowercase enum
  // names (via name().toLowerCase()) become the voot_membership_role values.
  private enum Role {
    Manager, Admin, Member, none
  }

  /**
   * Sets voot_membership_role on each group for the given person. Groups
   * without an explicit admin/manager row default to {@link Role#Member}.
   */
  private void addRolesToGroups(String personId, List<Group20> groups) {
    try {
      RolesRowCallbackHandler handler = new RolesRowCallbackHandler();
      this.jdbcTemplate.query(SQL_ROLES_BY_TEAMS, new Object[] { personId }, handler);
      Map<String, Role> roles = handler.roles;
      for (Group20 group : groups) {
        Role role = roles.get(group.getId());
        role = (role == null ? Role.Member : role);
        group.setVoot_membership_role(role.name().toLowerCase());
      }
    } catch (EmptyResultDataAccessException e) {
      // this we can ignore: no explicit roles means everyone is a plain Member
    }
  }

  /**
   * Accumulates group-name -> {@link Role} while iterating the roles result
   * set. An "admins" permission row wins over a manager row, and an already
   * recorded Admin is never downgraded.
   */
  private class RolesRowCallbackHandler implements RowCallbackHandler {
    // groupname -> strongest role seen so far
    protected Map<String, Role> roles;

    public RolesRowCallbackHandler() {
      super();
      this.roles = new HashMap<String, Role>();
    }

    @Override
    public void processRow(ResultSet rs) throws SQLException {
      String groupName = rs.getString("groupname");
      String permission = rs.getString("fieldname");
      /*
       * If the permission equals 'admins' then we have an Role.Admin, else we
       * have a role Role.Manager, but we must not overwrite a previous
       * Role.Admin
       */
      Role role = roles.get(groupName);
      if (!Role.Admin.equals(role)) {
        roles.put(groupName, permission.equals("admins") ? Role.Admin : Role.Manager);
      }
    }
  }

  public void setJdbcTemplate(JdbcTemplate jdbcTemplate) {
    this.jdbcTemplate = jdbcTemplate;
  }

  /**
   * Returns a page of members (id only, plus their role) of the given group.
   *
   * @see nl.surfnet.coin.teams.service.impl.ApiGrouperDao#findAllMembers(java.lang.String, int, int)
   */
  @Override
  public GroupMembersEntry findAllMembers(String groupId, Integer offset, Integer pageSize) {
    List<Person> persons = new ArrayList<Person>();
    pageSize = correctPageSize(pageSize);
    offset = correctOffset(offset);
    try {
      // Only the subject id is selected; the rest of the Person stays empty.
      RowMapper<Person> mapper = new RowMapper<Person>() {
        @Override
        public Person mapRow(ResultSet rs, int rowNum) throws SQLException {
          Person person = new Person();
          person.setId(rs.getString(1));
          return person;
        }
      };
      persons = jdbcTemplate.query(SQL_MEMBERS_BY_TEAM, new Object[] { groupId, pageSize, offset }, mapper);
      if (CollectionUtils.isNotEmpty(persons)) {
        addPersonRolesToGroup(persons, groupId);
      }
    } catch (EmptyResultDataAccessException e) {
      // ignore as we have a sensible default
    }
    return new GroupMembersEntry(persons);
  }

  /**
   * Returns a page of the groups with the given ids, decorated with the
   * person's role per group. Uses the named-parameter template because of the
   * IN (:groupId) clause.
   */
  @Override
  public Group20Entry findGroups20ByIds(String personId, String[] groupIds, Integer pageSize, Integer offset) {
    Map<String, Object> params = new HashMap<String, Object>();
    params.put("groupId", Arrays.asList(groupIds));
    List<Group20> groups = new ArrayList<Group20>();
    pageSize = correctPageSize(pageSize);
    offset = correctOffset(offset);
    params.put("limit", pageSize);
    params.put("offset", offset);
    try {
      String sql = SQL_FIND_TEAMS_BY_GROUPIDS;
      groups = namedParameterJdbcTemplate.query(sql, params, new OpenSocial20GroupRowMapper());
      addRolesToGroups(personId, groups);
    } catch (EmptyResultDataAccessException e) {
      // no matching groups: return the empty page
    }
    // FIXME: rowCount != groups.size(); a separate row-count query should be
    // used instead (it already exists: SQL_FIND_TEAMS_BY_GROUPIDS_ROWCOUNT)
    return new Group20Entry(groups, pageSize, offset, null, groups.size());
  }

  /**
   * Sets voot_membership_role on each person for the given group, defaulting
   * to Member when no explicit admin/manager row exists.
   */
  @SuppressWarnings("unchecked")
  private void addPersonRolesToGroup(Collection<Person> persons, String groupId) {
    try {
      RolesMembersRowCallbackHandler handler = new RolesMembersRowCallbackHandler();
      // Collect the person ids for the IN (:identifiers) clause.
      Collection<String> personIds = CollectionUtils.collect(persons, new Transformer() {
        @Override
        public Object transform(Object input) {
          return ((Person) input).getId();
        }
      });
      Map<String, Object> params = new HashMap<String, Object>();
      params.put("groupId", groupId);
      params.put("identifiers", personIds);
      namedParameterJdbcTemplate.query(SQL_ROLES_BY_TEAM_AND_MEMBERS, params, handler);
      for (Person person : persons) {
        Role role = handler.roles.get(person.getId());
        role = (role == null ? Role.Member : role);
        person.setVoot_membership_role(role.name().toLowerCase());
      }
    } catch (EmptyResultDataAccessException e) {
      // this we can ignore: no explicit roles means everyone is a plain Member
    }
  }

  /**
   * Same accumulation logic as {@link RolesRowCallbackHandler}, keyed by the
   * member's subject id instead of the group name.
   */
  private class RolesMembersRowCallbackHandler extends RolesRowCallbackHandler {
    @Override
    public void processRow(ResultSet rs) throws SQLException {
      String personName = rs.getString("subject_id");
      String permission = rs.getString("fieldname");
      Role role = roles.get(personName);
      if (!Role.Admin.equals(role)) {
        roles.put(personName, permission.equals("admins") ? Role.Admin : Role.Manager);
      }
    }
  }

  /**
   * @param namedParameterJdbcTemplate
   *          the namedParameterJdbcTemplate to set
   */
  public void setNamedParameterJdbcTemplate(NamedParameterJdbcTemplate namedParameterJdbcTemplate) {
    this.namedParameterJdbcTemplate = namedParameterJdbcTemplate;
  }
}
/** * Copyright 2014 Zaradai * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.zaradai.kunzite.trader.marketdata; import com.zaradai.kunzite.trader.instruments.Instrument; import com.zaradai.kunzite.trader.mocks.InstrumentMocker; import org.junit.Before; import org.junit.Test; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; import static org.junit.Assert.assertThat; public class DefaultMarketBookTest { private static final String TEST_INST_ID = "test"; private static final double TEST_PRICE = 52.45; private static final long TEST_QTY = 2345; private Instrument instrument; private DefaultMarketBook uut; @Before public void setUp() throws Exception { instrument = InstrumentMocker.create(TEST_INST_ID); uut = new DefaultMarketBook(instrument); } @Test public void shouldGetInstrumentId() throws Exception { assertThat(uut.getInstrumentId(), is(TEST_INST_ID)); } @Test public void shouldGetLastTrade() throws Exception { assertThat(uut.getLastTrade(), not(nullValue())); assertThat(uut.getLastTradedPrice(), is(0.0)); assertThat(uut.getLastTradedSize(), is(0L)); } @Test public void shouldGetBids() throws Exception { uut.setPrice(Side.Bid, 0, TEST_PRICE); uut.setPrice(Side.Bid, 1, TEST_PRICE); uut.setPrice(Side.Bid, 2, TEST_PRICE); uut.setPrice(Side.Bid, 3, TEST_PRICE); uut.setPrice(Side.Bid, 4, TEST_PRICE); uut.setPrice(Side.Bid, 5, TEST_PRICE); uut.setPrice(Side.Bid, 6, TEST_PRICE); 
uut.setPrice(Side.Bid, 7, TEST_PRICE); uut.setPrice(Side.Bid, 8, TEST_PRICE); uut.setPrice(Side.Bid, 9, TEST_PRICE); uut.setSize(Side.Bid, 0, TEST_QTY); uut.setSize(Side.Bid, 1, TEST_QTY); uut.setSize(Side.Bid, 2, TEST_QTY); uut.setSize(Side.Bid, 3, TEST_QTY); uut.setSize(Side.Bid, 4, TEST_QTY); uut.setSize(Side.Bid, 5, TEST_QTY); uut.setSize(Side.Bid, 6, TEST_QTY); uut.setSize(Side.Bid, 7, TEST_QTY); uut.setSize(Side.Bid, 8, TEST_QTY); uut.setSize(Side.Bid, 9, TEST_QTY); assertThat(uut.bestBid(), is(TEST_PRICE)); assertThat(uut.bestBidSize(), is(TEST_QTY)); assertThat(uut.getBid(0), is(TEST_PRICE)); assertThat(uut.getBid(1), is(TEST_PRICE)); assertThat(uut.getBid(2), is(TEST_PRICE)); assertThat(uut.getBid(3), is(TEST_PRICE)); assertThat(uut.getBid(4), is(TEST_PRICE)); assertThat(uut.getBid(5), is(TEST_PRICE)); assertThat(uut.getBid(6), is(TEST_PRICE)); assertThat(uut.getBid(7), is(TEST_PRICE)); assertThat(uut.getBid(8), is(TEST_PRICE)); assertThat(uut.getBid(9), is(TEST_PRICE)); assertThat(uut.getBidSize(0), is(TEST_QTY)); assertThat(uut.getBidSize(1), is(TEST_QTY)); assertThat(uut.getBidSize(2), is(TEST_QTY)); assertThat(uut.getBidSize(3), is(TEST_QTY)); assertThat(uut.getBidSize(4), is(TEST_QTY)); assertThat(uut.getBidSize(5), is(TEST_QTY)); assertThat(uut.getBidSize(6), is(TEST_QTY)); assertThat(uut.getBidSize(7), is(TEST_QTY)); assertThat(uut.getBidSize(8), is(TEST_QTY)); assertThat(uut.getBidSize(9), is(TEST_QTY)); } @Test public void shouldGetAsks() throws Exception { uut.setPrice(Side.Ask, 0, TEST_PRICE); uut.setPrice(Side.Ask, 1, TEST_PRICE); uut.setPrice(Side.Ask, 2, TEST_PRICE); uut.setPrice(Side.Ask, 3, TEST_PRICE); uut.setPrice(Side.Ask, 4, TEST_PRICE); uut.setPrice(Side.Ask, 5, TEST_PRICE); uut.setPrice(Side.Ask, 6, TEST_PRICE); uut.setPrice(Side.Ask, 7, TEST_PRICE); uut.setPrice(Side.Ask, 8, TEST_PRICE); uut.setPrice(Side.Ask, 9, TEST_PRICE); uut.setSize(Side.Ask, 0, TEST_QTY); uut.setSize(Side.Ask, 1, TEST_QTY); uut.setSize(Side.Ask, 2, TEST_QTY); 
uut.setSize(Side.Ask, 3, TEST_QTY); uut.setSize(Side.Ask, 4, TEST_QTY); uut.setSize(Side.Ask, 5, TEST_QTY); uut.setSize(Side.Ask, 6, TEST_QTY); uut.setSize(Side.Ask, 7, TEST_QTY); uut.setSize(Side.Ask, 8, TEST_QTY); uut.setSize(Side.Ask, 9, TEST_QTY); assertThat(uut.bestAsk(), is(TEST_PRICE)); assertThat(uut.bestAskSize(), is(TEST_QTY)); assertThat(uut.getAsk(0), is(TEST_PRICE)); assertThat(uut.getAsk(1), is(TEST_PRICE)); assertThat(uut.getAsk(2), is(TEST_PRICE)); assertThat(uut.getAsk(3), is(TEST_PRICE)); assertThat(uut.getAsk(4), is(TEST_PRICE)); assertThat(uut.getAsk(5), is(TEST_PRICE)); assertThat(uut.getAsk(6), is(TEST_PRICE)); assertThat(uut.getAsk(7), is(TEST_PRICE)); assertThat(uut.getAsk(8), is(TEST_PRICE)); assertThat(uut.getAsk(9), is(TEST_PRICE)); assertThat(uut.getAskSize(0), is(TEST_QTY)); assertThat(uut.getAskSize(1), is(TEST_QTY)); assertThat(uut.getAskSize(2), is(TEST_QTY)); assertThat(uut.getAskSize(3), is(TEST_QTY)); assertThat(uut.getAskSize(4), is(TEST_QTY)); assertThat(uut.getAskSize(5), is(TEST_QTY)); assertThat(uut.getAskSize(6), is(TEST_QTY)); assertThat(uut.getAskSize(7), is(TEST_QTY)); assertThat(uut.getAskSize(8), is(TEST_QTY)); assertThat(uut.getAskSize(9), is(TEST_QTY)); } @Test public void shouldHandleOutOfRange() throws Exception { uut.setPrice(Side.Bid, -1, TEST_PRICE); uut.setPrice(Side.Bid, 10, TEST_PRICE); uut.setPrice(Side.Ask, -1, TEST_PRICE); uut.setPrice(Side.Ask, 10, TEST_PRICE); uut.setSize(Side.Bid, -1, TEST_QTY); uut.setSize(Side.Bid, 10, TEST_QTY); uut.setSize(Side.Ask, -1, TEST_QTY); uut.setSize(Side.Ask, 10, TEST_QTY); assertThat(uut.getAskSize(10), is(0L)); assertThat(uut.getBidSize(10), is(0L)); assertThat(uut.getAskSize(-1), is(0L)); assertThat(uut.getBidSize(-1), is(0L)); assertThat(uut.getAsk(10), is(0.0)); assertThat(uut.getBid(10), is(0.0)); assertThat(uut.getAsk(-1), is(0.0)); assertThat(uut.getBid(-1), is(0.0)); } @Test public void shouldGetDepth() throws Exception { uut.setPrice(Side.Ask, 0, TEST_PRICE); 
uut.setPrice(Side.Ask, 1, TEST_PRICE); uut.setPrice(Side.Ask, 2, TEST_PRICE); uut.setPrice(Side.Bid, 0, TEST_PRICE); uut.setPrice(Side.Bid, 1, TEST_PRICE); uut.setPrice(Side.Bid, 2, TEST_PRICE); uut.setPrice(Side.Bid, 3, TEST_PRICE); uut.setPrice(Side.Bid, 4, TEST_PRICE); uut.setPrice(Side.Bid, 5, TEST_PRICE); assertThat(uut.getAskDepth(), is(3)); assertThat(uut.getBidDepth(), is(6)); } @Test public void shouldGetOhlc() throws Exception { uut.setOpen(TEST_PRICE); uut.setHigh(TEST_PRICE); uut.setLow(TEST_PRICE); uut.setPrevClose(TEST_PRICE); assertThat(uut.getOpen(), is(TEST_PRICE)); assertThat(uut.getHigh(), is(TEST_PRICE)); assertThat(uut.getLow(), is(TEST_PRICE)); assertThat(uut.getPrevClose(), is(TEST_PRICE)); } @Test public void shouldSetRandomDepth() throws Exception { uut.setSize(Side.Bid, 5, TEST_QTY); uut.setSize(Side.Ask, 5, TEST_QTY); uut.setPrice(Side.Bid, 5, TEST_PRICE); uut.setPrice(Side.Ask, 5, TEST_PRICE); assertThat(uut.getAsk(5), is(TEST_PRICE)); assertThat(uut.getBid(5), is(TEST_PRICE)); assertThat(uut.getAskSize(5), is(TEST_QTY)); assertThat(uut.getBidSize(5), is(TEST_QTY)); } @Test public void shouldResetData() throws Exception { uut.setSize(Side.Bid, 0, TEST_QTY); uut.setSize(Side.Bid, 1, TEST_QTY); uut.setSize(Side.Bid, 2, TEST_QTY); uut.setPrice(Side.Bid, 0, TEST_PRICE); uut.setPrice(Side.Bid, 1, TEST_PRICE); uut.setPrice(Side.Bid, 2, TEST_PRICE); uut.reset(); assertThat(uut.getBid(0), is(0.0)); assertThat(uut.getBidSize(0), is(0L)); } }
/**
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *          http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.brixcms.plugin.site.picker.reference;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;

import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.AjaxLink;
import org.apache.wicket.ajax.markup.html.form.AjaxButton;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.FormComponent;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.FeedbackPanel;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.CompoundPropertyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.model.ResourceModel;
import org.apache.wicket.util.lang.Objects;
import org.apache.wicket.util.string.StringValue;
import org.brixcms.web.BrixFeedbackPanel;
import org.brixcms.web.nodepage.BrixPageParameters;
import org.brixcms.web.util.DisabledClassAppender;

import com.inmethod.grid.IDataSource;
import com.inmethod.grid.IGridColumn;
import com.inmethod.grid.SizeUnit;
import com.inmethod.grid.column.CheckBoxColumn;
import com.inmethod.grid.column.editable.EditablePropertyColumn;
import com.inmethod.grid.column.editable.SubmitCancelColumn;
import com.inmethod.grid.datagrid.DataGrid;

/**
 * Wicket panel that edits the named query parameters of a page as key/value
 * pairs: a small form to add a new pair, an editable grid listing the existing
 * pairs, and an AJAX link to remove the selected rows. Subclasses supply the
 * parameters being edited via {@link #getPageParameters()}; every mutation is
 * written straight back through {@code DataSource.storeToPageParameters()}.
 */
public abstract class QueryParametersTab extends Panel {
    // Link enabled only while grid rows are selected; re-rendered on selection change.
    AjaxLink<?> removeSelected;
    // Backing bean of the "add" form; replaced with a fresh Entry after each add.
    private Entry newEntry = new Entry();
    // Bridges the page parameters to the grid.
    private final DataSource dataSource = new DataSource();

    /**
     * Builds the add-form, the editable grid and the remove link.
     *
     * @param id wicket component id
     */
    public QueryParametersTab(String id) {
        super(id);
        setOutputMarkupId(true);

        final FeedbackPanel feedback = new BrixFeedbackPanel("feedback");
        feedback.setOutputMarkupId(true);
        add(feedback);

        // "newEntry" is re-read through the PropertyModel on every request, so
        // replacing the field after a submit gives the form a clean bean.
        Form<Entry> newForm = new Form<Entry>("newForm", new CompoundPropertyModel<Entry>(new PropertyModel<Entry>(
                this, "newEntry")));
        add(newForm);
        newForm.add(new TextField<String>("key").setRequired(true));
        newForm.add(new TextField<String>("value").setRequired(true));
        newForm.add(new AjaxButton("add") {
            @Override
            protected void onSubmit(AjaxRequestTarget target) {
                // Persist the new pair immediately and repaint the whole tab.
                dataSource.addEntry(newEntry);
                dataSource.storeToPageParameters();
                target.add(QueryParametersTab.this);
                newEntry = new Entry();
            }

            @Override
            protected void onError(AjaxRequestTarget target) {
                target.add(feedback);
            }
        });

        // Grid columns: row selector, editable key/value, and edit submit/cancel.
        List<IGridColumn> columns = new ArrayList<IGridColumn>();
        columns.add(new CheckBoxColumn("checkbox"));
        columns.add(new EditablePropertyColumn(new ResourceModel("key"), "key") {
            @Override
            protected void addValidators(FormComponent component) {
                component.setRequired(true);
            }
        });
        columns.add(new EditablePropertyColumn(new ResourceModel("value"), "value") {
            @Override
            protected void addValidators(FormComponent component) {
                component.setRequired(true);
            }
        });
        columns.add(new SubmitCancelColumn("submitCancel", new ResourceModel("edit")) {
            @Override
            protected void onSubmitted(AjaxRequestTarget target, IModel rowModel, WebMarkupContainer rowComponent) {
                // Write the in-place edit back before the row leaves edit mode.
                dataSource.storeToPageParameters();
                super.onSubmitted(target, rowModel, rowComponent);
                target.add(feedback);
            }

            @Override
            protected void onError(AjaxRequestTarget target, IModel rowModel, WebMarkupContainer rowComponent) {
                target.add(feedback);
            }
        });

        final DataGrid grid = new DataGrid("grid", dataSource, columns) {
            @Override
            public void onItemSelectionChanged(IModel item, boolean newValue) {
                // Repaint the remove link so its enabled state tracks selection.
                Optional<AjaxRequestTarget> target = getRequestCycle().find(AjaxRequestTarget.class);
                if (target.isPresent()) {
                    target.get().add(removeSelected);
                }
                super.onItemSelectionChanged(item, newValue);
            }
        };
        // Show all rows on one page; editing is triggered via the edit column,
        // not by selecting a row.
        grid.setRowsPerPage(Integer.MAX_VALUE);
        grid.setAllowSelectMultiple(true);
        grid.setContentHeight(14, SizeUnit.EM);
        grid.setSelectToEdit(false);
        add(grid);

        add(removeSelected = new AjaxLink<Void>("removeSelected") {
            @Override
            public void onClick(AjaxRequestTarget target) {
                Collection<IModel> items = grid.getSelectedItems();
                if (items.size() > 0) {
                    for (IModel model : items) {
                        Entry entry = (Entry) model.getObject();
                        dataSource.removeEntry(entry);
                    }
                    grid.resetSelectedItems();
                    dataSource.storeToPageParameters();
                    grid.markAllItemsDirty();
                    grid.update();
                } else {
                    target.appendJavaScript("alert('" + getString("noItemsSelected") + "');");
                }
            }

            @Override
            public boolean isEnabled() {
                return !grid.getSelectedItems().isEmpty();
            }
        });
        removeSelected.add(new DisabledClassAppender());
    }

    /**
     * @return the page parameters this tab reads from and writes to
     */
    protected abstract BrixPageParameters getPageParameters();

    /**
     * Grid data source backed by {@link #getPageParameters()}. Entries are
     * lazily materialised into a sorted set per request and discarded again on
     * {@link #detach()}.
     */
    private class DataSource implements IDataSource {
        public void detach() {
            entries = null;
        }

        public IModel model(Object object) {
            // Value-equality model so the grid can match rows across requests.
            return new Model<Serializable>((Serializable) object) {
                @Override
                public boolean equals(Object obj) {
                    if (this == obj) {
                        return true;
                    }
                    if (obj instanceof Model == false) {
                        return false;
                    }
                    Model that = (Model) obj;
                    return Objects.equal(getObject(), that.getObject());
                }

                @Override
                public int hashCode() {
                    return getObject().hashCode();
                }
            };
        }

        // Lazily builds the entry set from the named page parameters,
        // flattening multi-valued parameters into one Entry per value.
        private Set<Entry> getEntries() {
            if (entries == null) {
                entries = new TreeSet<Entry>();
                for (String s : getPageParameters().getNamedKeys()) {
                    for (StringValue v : getPageParameters().getValues(s)) {
                        Entry e = new Entry();
                        e.key = s;
                        e.value = v.toString();
                        entries.add(e);
                    }
                }
            }
            return entries;
        }

        public void query(IQuery query, IQueryResult result) {
            result.setTotalCount(getEntries().size());
            result.setItems(getEntries().iterator());
        }

        private void addEntry(Entry entry) {
            getEntries().add(entry);
        }

        private void removeEntry(Entry entry) {
            getEntries().remove(entry);
        }

        // Replaces all named page parameters with the current entry set.
        // No-op when the set was never materialised (nothing changed).
        private void storeToPageParameters() {
            if (entries != null) {
                getPageParameters().clearNamed();
                for (Entry entry : entries) {
                    getPageParameters().set(entry.key, entry.value);
                }
            }
        }

        // Cache of the current entries; null until first use and after detach.
        private Set<Entry> entries = null;
    }

    /**
     * One key/value pair. Comparable (key first, then value) so the TreeSet
     * keeps a stable display order; equals/hashCode match compareTo.
     */
    private static class Entry implements Serializable, Comparable<Entry> {
        private String key;
        private String value;

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj instanceof Entry == false) {
                return false;
            }
            Entry that = (Entry) obj;
            return Objects.equal(key, that.key) && Objects.equal(value, that.value);
        }

        public int compareTo(Entry o) {
            int v = key.compareTo(o.key);
            if (v != 0) {
                return v;
            } else {
                return value.compareTo(o.value);
            }
        }

        @Override
        public int hashCode() {
            return Objects.hashCode(new Object[] { this.key, this.value });
        }
    }

    ;
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.testing;

import com.google.common.collect.ObjectArrays;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.psi.PsiDirectory;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.TextAccessor;
import com.intellij.ui.components.JBRadioButton;
import com.intellij.ui.components.JBTextField;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.util.ThreeState;
import com.intellij.util.ui.JBUI;
import com.jetbrains.PySymbolFieldWithBrowseButton;
import com.jetbrains.extensions.ContextAnchor;
import com.jetbrains.extensions.ModuleBasedContextAnchor;
import com.jetbrains.extensions.ProjectSdkContextAnchor;
import com.jetbrains.python.psi.types.TypeEvalContext;
import com.jetbrains.python.run.AbstractPyCommonOptionsForm;
import com.jetbrains.python.run.PyBrowseActionListener;
import com.jetbrains.python.run.PyCommonOptionsFormFactory;
import com.jetbrains.python.run.targetBasedConfiguration.PyRunTargetVariant;
import com.jetbrains.reflection.ReflectionUtilsKt;
import com.jetbrains.reflection.SimplePropertiesProvider;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.util.*;
import java.util.List;
import java.util.regex.Pattern;

/**
 * Form to display a test run configuration.
 * It displays target type, target, additional arguments, custom options (if provided) and environment options.
 * Create with {@link #create(PyAbstractTestConfiguration, PyTestCustomOption...)}.
 *
 * @author Ilya.Kazakevich
 */
public final class PyTestSharedForm implements SimplePropertiesProvider {
  // Root panel of the form (bound by the GUI designer).
  private JPanel myPanel;
  /**
   * Panel holding one radio button per test target type
   */
  private JPanel myTargets;
  /**
   * Panel for environment options
   */
  private JPanel myOptionsPanel;
  /**
   * Panel for custom options, specific for the runner, and for "Additional Arguments"
   */
  private JPanel myCustomOptionsPanel;
  // Panel into which the active target field (path or python symbol) is swapped.
  private JPanel myPanelForTargetFields;
  private final ButtonGroup myButtonGroup = new ButtonGroup();
  private AbstractPyCommonOptionsForm myOptionsForm;
  // LinkedHashMap so that custom options keep their insertion order in the UI.
  private final Map<String, OptionHolder> myCustomOptions = new LinkedHashMap<>();
  // Target field used for PATH targets (file/folder browse).
  private final TextFieldWithBrowseButton myPathTarget;
  // Target field used for PYTHON targets (symbol browse).
  private final PySymbolFieldWithBrowseButton myPythonTarget;

  /**
   * @return the root panel to embed into the settings editor
   */
  @NotNull
  JPanel getPanel() {
    return myPanel;
  }

  /**
   * Property names exposed to reflection-based configuration copying:
   * exactly the names of the registered custom options.
   */
  @NotNull
  @Override
  public List<String> getPropertyNames() {
    return new ArrayList<>(myCustomOptions.keySet());
  }

  /**
   * Writes a custom option value into its text field; null is rendered as empty text.
   */
  @Override
  public void setPropertyValue(@NotNull final String propertyName, @Nullable final String propertyValue) {
    myCustomOptions.get(propertyName).myOptionValue.setText(propertyValue != null ? propertyValue : "");
  }

  /**
   * Reads a custom option value back from its text field.
   */
  @Nullable
  @Override
  public String getPropertyValue(@NotNull final String propertyName) {
    return myCustomOptions.get(propertyName).myOptionValue.getText();
  }

  /**
   * Builds both target fields; the symbol field filters the chooser to test elements
   * (folders are always accepted) and roots browsing at the configured working directory.
   */
  private PyTestSharedForm(@Nullable final Module module, @NotNull final PyAbstractTestConfiguration configuration) {
    myPathTarget = new TextFieldWithBrowseButton();
    final Project project = configuration.getProject();
    myPathTarget.addBrowseFolderListener(new PyBrowseActionListener(configuration));
    final TypeEvalContext context = TypeEvalContext.userInitiated(project, null);
    final ThreeState testClassRequired = configuration.isTestClassRequired();
    // Anchor symbol resolution to the module if available, otherwise to the project SDK.
    ContextAnchor contentAnchor =
      (module != null ? new ModuleBasedContextAnchor(module) : new ProjectSdkContextAnchor(project, configuration.getSdk()));
    myPythonTarget = new PySymbolFieldWithBrowseButton(contentAnchor, element -> {
      if (element instanceof PsiDirectory) {
        // Folder is always accepted because we can't be sure
        // if it is test-enabled or not
        return true;
      }
      return PyTestsSharedKt.isTestElement(element, testClassRequired, context);
    }, () -> {
      final String workingDirectory = configuration.getWorkingDirectory();
      if (StringUtil.isEmpty(workingDirectory)) {
        return null;
      }
      return LocalFileSystem.getInstance().findFileByPath(workingDirectory);
    });
  }

  /**
   * Titled border used among test run configurations
   */
  public static void setBorderToPanel(@NotNull final JPanel panel, @NotNull final @NlsSafe String title) {
    panel.setBorder(IdeBorderFactory.createTitledBorder(title, false));
  }

  /**
   * Factory method: builds the form, populates target-type radio buttons, installs the common
   * options form, registers custom options (always including "Additional Arguments"), and copies
   * current values from the configuration into the form.
   *
   * @param configuration configuration to configure form on creation
   * @param customOptions additional option names this form shall support. Make sure your configuration has appropriate properties.
   */
  @NotNull
  public static PyTestSharedForm create(@NotNull final PyAbstractTestConfiguration configuration,
                                        final PyTestCustomOption @NotNull ... customOptions) {
    final PyTestSharedForm form = new PyTestSharedForm(configuration.getModule(), configuration);

    // One radio button per target variant; the action command carries the enum name.
    for (final PyRunTargetVariant testTargetType : PyRunTargetVariant.values()) {
      final JBRadioButton button =
        new JBRadioButton(StringUtil.capitalize(testTargetType.getCustomName().toLowerCase(Locale.getDefault())));
      button.setActionCommand(testTargetType.name()); // NON-NLS
      button.addActionListener(o -> form.onTargetTypeChanged());
      form.myButtonGroup.add(button);
      form.myTargets.add(button);
    }
    // Select the first variant by default.
    form.myButtonGroup.getElements().nextElement().setSelected(true);

    form.myOptionsForm = PyCommonOptionsFormFactory.getInstance().createForm(configuration.getCommonOptionsFormData());
    final GridConstraints constraints = new GridConstraints();
    constraints.setFill(GridConstraints.FILL_BOTH);
    form.myOptionsPanel.add(form.myOptionsForm.getMainPanel(), constraints);

    setBorderToPanel(form.myPanel, configuration.getTestFrameworkName());

    // "Additional Arguments" is always appended to whatever custom options the caller supplied.
    form.addCustomOptions(
      ObjectArrays.concat(customOptions, new PyTestCustomOption(
        PyTestsSharedKt.getAdditionalArgumentsProperty(), PyRunTargetVariant.values()))
    );
    configuration.copyTo(ReflectionUtilsKt.getProperties(form, null, true));
    return form;
  }

  /**
   * Lays out one label + text field row per custom option in a two-column GridBag,
   * and records each option in {@link #myCustomOptions}.
   */
  private void addCustomOptions(final PyTestCustomOption @NotNull ... customOptions) {
    if (customOptions.length == 0) {
      return;
    }
    final Map<String, JBTextField> optionValueFields = new HashMap<>();
    for (final PyTestCustomOption option : customOptions) {
      final JBTextField textField = new JBTextField();
      optionValueFields.put(option.getName(), textField);
    }
    final GridBagConstraints constraints = new GridBagConstraints();
    constraints.insets = JBUI.insets(3);
    constraints.gridy = 0;
    constraints.anchor = GridBagConstraints.LINE_START;

    for (final PyTestCustomOption option : customOptions) {
      final JBTextField textField = optionValueFields.get(option.getName());
      final JLabel label = new JLabel(option.getLocalizedName()); // NON-NLS
      label.setHorizontalAlignment(SwingConstants.LEFT);

      // Column 0: fixed-width label.
      constraints.fill = GridBagConstraints.NONE;
      constraints.gridx = 0;
      constraints.weightx = 0;
      myCustomOptionsPanel.add(label, constraints);

      // Column 1: value field stretches horizontally.
      constraints.gridx = 1;
      constraints.weightx = 1.0;
      constraints.fill = GridBagConstraints.HORIZONTAL;
      myCustomOptionsPanel.add(textField, constraints);

      constraints.gridy++;
      myCustomOptions.put(option.getName(), new OptionHolder(option, label, textField));
    }
  }

  @NotNull
  AbstractPyCommonOptionsForm getOptionsForm() {
    return myOptionsForm;
  }

  /**
   * @return the target text of the currently active field; PATH targets are normalized
   * to system-independent separators.
   */
  @NotNull
  public String getTarget() {
    // We should always use system-independent path because only this type of path is processed correctly
    // when stored (folder changed to macros to prevent hard code)
    final String targetText = getActiveTextField().getText().trim();
    return getTargetType() == PyRunTargetVariant.PATH ? FileUtil.toSystemIndependentName(targetText) : targetText;
  }

  public void setTarget(@NotNull final String targetText) {
    getActiveTextField().setText(targetText);
  }

  /**
   * Reacts to target-type radio changes: toggles custom-option visibility and swaps
   * the appropriate target field into {@link #myPanelForTargetFields}.
   */
  private void onTargetTypeChanged() {
    final PyRunTargetVariant targetType = getTargetType();

    for (final OptionHolder optionHolder : myCustomOptions.values()) {
      optionHolder.setType(targetType);
    }

    // Clear previously installed target field(s) before adding the active one.
    Arrays.stream(myPanelForTargetFields.getComponents()).forEach(myPanelForTargetFields::remove);

    final GridBagConstraints cons = new GridBagConstraints();
    cons.fill = GridBagConstraints.HORIZONTAL;
    cons.weightx = 1;
    if (targetType == PyRunTargetVariant.PATH) {
      myPanelForTargetFields.add(myPathTarget, cons);
    }
    else if (targetType == PyRunTargetVariant.PYTHON) {
      myPanelForTargetFields.add(myPythonTarget, cons);
    }
  }

  // The field that corresponds to the currently selected target type.
  @NotNull
  private TextAccessor getActiveTextField() {
    return (getTargetType() == PyRunTargetVariant.PATH ? myPathTarget : myPythonTarget);
  }

  @SuppressWarnings("WeakerAccess") // Accessor for property
  @NotNull
  public PyRunTargetVariant getTargetType() {
    // The action command was set to the enum constant name when the button was created.
    return PyRunTargetVariant.valueOf(myButtonGroup.getSelection().getActionCommand());
  }

  @SuppressWarnings("unused") // Mutator for property
  public void setTargetType(@NotNull final PyRunTargetVariant target) {
    final Enumeration<AbstractButton> elements = myButtonGroup.getElements();
    while (elements.hasMoreElements()) {
      final AbstractButton button = elements.nextElement();
      if (PyRunTargetVariant.valueOf(button.getActionCommand()) == target) {
        myButtonGroup.setSelected(button.getModel(), true);
        break;
      }
    }
    onTargetTypeChanged();
  }

  /**
   * Bundles a custom option with its UI widgets so both can be hidden/shown together
   * when the target type changes.
   */
  private static final class OptionHolder {
    @NotNull
    private final PyTestCustomOption myOption;
    @NotNull
    private final JLabel myOptionLabel;
    @NotNull
    private final JTextField myOptionValue;

    private OptionHolder(@NotNull final PyTestCustomOption option,
                         @NotNull final JLabel optionLabel,
                         @NotNull final JTextField optionValue) {
      myOption = option;
      myOptionLabel = optionLabel;
      myOptionValue = optionValue;
    }

    /**
     * Shows the option only if it supports the given target type.
     */
    private void setType(@NotNull final PyRunTargetVariant type) {
      final boolean visible = myOption.getMySupportedTypes().contains(type);
      myOptionLabel.setVisible(visible);
      myOptionValue.setVisible(visible);
    }
  }
}
/*
 * Copyright 2015 Open mHealth
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.openmhealth.shim.jawbone;

import com.fasterxml.jackson.databind.JsonNode;
import org.openmhealth.shim.*;
import org.openmhealth.shim.jawbone.mapper.*;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.security.oauth2.client.OAuth2RestOperations;
import org.springframework.security.oauth2.client.resource.OAuth2ProtectedResourceDetails;
import org.springframework.security.oauth2.client.resource.UserRedirectRequiredException;
import org.springframework.security.oauth2.client.token.AccessTokenRequest;
import org.springframework.security.oauth2.client.token.RequestEnhancer;
import org.springframework.security.oauth2.client.token.grant.code.AuthorizationCodeAccessTokenProvider;
import org.springframework.stereotype.Component;
import org.springframework.util.MultiValueMap;
import org.springframework.util.StringUtils;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.HttpServerErrorException;
import org.springframework.web.util.UriComponentsBuilder;

import java.time.OffsetDateTime;
import java.util.Arrays;
import java.util.List;

import static java.util.Collections.singletonList;
import static org.slf4j.LoggerFactory.getLogger;

/**
 * Encapsulates parameters specific to the Jawbone API and processes requests for Jawbone data from shimmer.
 *
 * @author Danilo Bonilla
 * @author Chris Schaefbauer
 */
@Component
@ConfigurationProperties(prefix = "openmhealth.shim.jawbone")
public class JawboneShim extends OAuth2ShimBase {

    public static final String SHIM_KEY = "jawbone";

    // Base URL for data requests; the per-type endpoint is appended as a path segment.
    private static final String DATA_URL = "https://jawbone.com/nudge/api/v.1.1/users/@me/";

    private static final String AUTHORIZE_URL = "https://jawbone.com/auth/oauth2/auth";

    private static final String TOKEN_URL = "https://jawbone.com/auth/oauth2/token";

    // OAuth scopes requested from Jawbone, covering every data type this shim can retrieve.
    public static final List<String> JAWBONE_SCOPES = Arrays.asList(
            "extended_read", "weight_read", "heartrate_read", "meal_read", "move_read", "sleep_read");

    private static final Logger logger = getLogger(JawboneShim.class);

    @Autowired
    public JawboneShim(ApplicationAccessParametersRepo applicationParametersRepo,
                       AuthorizationRequestParametersRepo authorizationRequestParametersRepo,
                       AccessParametersRepo accessParametersRepo,
                       ShimServerConfig shimServerConfig1) {

        super(applicationParametersRepo, authorizationRequestParametersRepo, accessParametersRepo, shimServerConfig1);
    }

    @Override
    public String getLabel() {
        return "Jawbone UP";
    }

    @Override
    public String getShimKey() {
        return SHIM_KEY;
    }

    @Override
    public String getBaseAuthorizeUrl() {
        return AUTHORIZE_URL;
    }

    @Override
    public String getBaseTokenUrl() {
        return TOKEN_URL;
    }

    @Override
    public List<String> getScopes() {
        return JAWBONE_SCOPES;
    }

    // Jawbone requires custom token-request behavior (GET + extra form params); see inner classes below.
    public AuthorizationCodeAccessTokenProvider getAuthorizationCodeAccessTokenProvider() {
        return new JawboneAuthorizationCodeAccessTokenProvider();
    }

    @Override
    public ShimDataType[] getShimDataTypes() {
        return new JawboneDataTypes[] {
                JawboneDataTypes.SLEEP, JawboneDataTypes.ACTIVITY, JawboneDataTypes.BODY_MASS_INDEX,
                JawboneDataTypes.WEIGHT, JawboneDataTypes.HEART_RATE, JawboneDataTypes.STEPS};
    }

    /**
     * Data types supported by this shim, each mapped to its Jawbone API endpoint path segment.
     * Note: WEIGHT and BODY_MASS_INDEX both read from the "body_events" endpoint.
     */
    public enum JawboneDataTypes implements ShimDataType {

        SLEEP("sleeps"),
        ACTIVITY("workouts"),
        WEIGHT("body_events"),
        STEPS("moves"),
        BODY_MASS_INDEX("body_events"),
        HEART_RATE("heartrates");

        private String endPoint;

        JawboneDataTypes(String endPoint) {
            this.endPoint = endPoint;
        }

        public String getEndPoint() {
            return endPoint;
        }
    }

    /**
     * Retrieves one data type from the Jawbone API for the requested time window and, when
     * normalization is requested, maps the raw JSON into Open mHealth data points.
     *
     * @throws ShimException if the requested data type key is missing or unknown
     */
    // NOTE(review): the original cause is not chained into the ShimException below, and
    // NullPointerException is caught as control flow for a missing key — both worth revisiting
    // if ShimException offers a (String, Throwable) constructor.
    protected ResponseEntity<ShimDataResponse> getData(OAuth2RestOperations restTemplate,
                                                       ShimDataRequest shimDataRequest) throws ShimException {

        final JawboneDataTypes jawboneDataType;
        try {
            jawboneDataType = JawboneDataTypes.valueOf(
                    shimDataRequest.getDataTypeKey().trim().toUpperCase());
        }
        catch (NullPointerException | IllegalArgumentException e) {
            throw new ShimException("Null or Invalid data type parameter: "
                    + shimDataRequest.getDataTypeKey()
                    + " in shimDataRequest, cannot retrieve data.");
        }

        // FIXME this needs to get changed or documented
        long numToReturn = 100;
        if (shimDataRequest.getNumToReturn() != null) {
            numToReturn = shimDataRequest.getNumToReturn();
        }

        // Default window: yesterday through tomorrow (see end-date comment below).
        OffsetDateTime today = OffsetDateTime.now();

        OffsetDateTime startDateTime = shimDataRequest.getStartDateTime() == null ?
                today.minusDays(1) : shimDataRequest.getStartDateTime();
        long startTimeInEpochSecond = startDateTime.toEpochSecond();

        // We are inclusive of the last day, so we need to add an extra day since we are dealing with start of day,
        // and would miss the activities that occurred during the last day within going to midnight of that day
        OffsetDateTime endDateTime = shimDataRequest.getEndDateTime() == null ?
                today.plusDays(1) : shimDataRequest.getEndDateTime().plusDays(1);
        long endTimeInEpochSecond = endDateTime.toEpochSecond();

        UriComponentsBuilder uriComponentsBuilder =
                UriComponentsBuilder.fromUriString(DATA_URL).path(jawboneDataType.getEndPoint())
                        .queryParam("start_time", startTimeInEpochSecond)
                        .queryParam("end_time", endTimeInEpochSecond)
                        .queryParam("limit", numToReturn);

        ResponseEntity<JsonNode> responseEntity;
        try {
            responseEntity = restTemplate.getForEntity(uriComponentsBuilder.build().encode().toUri(), JsonNode.class);
        }
        catch (HttpClientErrorException | HttpServerErrorException e) {
            // FIXME figure out how to handle this
            logger.error("A request for Jawbone data failed.", e);
            throw e;
        }

        if (shimDataRequest.getNormalize()) {
            // Pick the mapper matching the requested data type.
            JawboneDataPointMapper mapper;
            switch ( jawboneDataType ) {
                case WEIGHT:
                    mapper = new JawboneBodyWeightDataPointMapper();
                    break;
                case STEPS:
                    mapper = new JawboneStepCountDataPointMapper();
                    break;
                case BODY_MASS_INDEX:
                    mapper = new JawboneBodyMassIndexDataPointMapper();
                    break;
                case ACTIVITY:
                    mapper = new JawbonePhysicalActivityDataPointMapper();
                    break;
                case SLEEP:
                    mapper = new JawboneSleepDurationDataPointMapper();
                    break;
                case HEART_RATE:
                    mapper = new JawboneHeartRateDataPointMapper();
                    break;
                default:
                    throw new UnsupportedOperationException();
            }
            return ResponseEntity.ok().body(ShimDataResponse
                    .result(JawboneShim.SHIM_KEY, mapper.asDataPoints(singletonList(responseEntity.getBody()))));
        }
        else {
            // Raw pass-through of the Jawbone JSON payload.
            return ResponseEntity.ok().body(ShimDataResponse.result(JawboneShim.SHIM_KEY, responseEntity.getBody()));
        }
    }

    /**
     * Rebuilds the authorization redirect URL from the redirect exception, adding state,
     * client id, response type, space-delimited scopes, and the callback URL.
     */
    @Override
    protected String getAuthorizationUrl(UserRedirectRequiredException exception) {
        final OAuth2ProtectedResourceDetails resource = getResource();

        UriComponentsBuilder uriBuilder = UriComponentsBuilder
                .fromUriString(exception.getRedirectUri())
                .queryParam("state", exception.getStateKey())
                .queryParam("client_id", resource.getClientId())
                .queryParam("response_type", "code")
                .queryParam("scope", StringUtils.collectionToDelimitedString(resource.getScope(), " "))
                .queryParam("redirect_uri", getCallbackUrl());

        return uriBuilder.build().encode().toUriString();
    }

    /**
     * Simple overrides to base spring class from oauth.
     * Jawbone's token endpoint expects a GET request rather than the default POST.
     */
    public class JawboneAuthorizationCodeAccessTokenProvider extends AuthorizationCodeAccessTokenProvider {

        public JawboneAuthorizationCodeAccessTokenProvider() {
            this.setTokenRequestEnhancer(new JawboneTokenRequestEnhancer());
        }

        @Override
        protected HttpMethod getHttpMethod() {
            return HttpMethod.GET;
        }
    }

    /**
     * Adds jawbone required parameters to authorization token requests
     * (client id and secret go into the form body).
     */
    private class JawboneTokenRequestEnhancer implements RequestEnhancer {

        @Override
        public void enhance(AccessTokenRequest request,
                            OAuth2ProtectedResourceDetails resource,
                            MultiValueMap<String, String> form,
                            HttpHeaders headers) {

            form.set("client_id", resource.getClientId());
            form.set("client_secret", resource.getClientSecret());
        }
    }
}
/*  Copyright 2004 Arnaud CEOL

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
*/
package psidev.psi.mi.filemakers.xmlMaker.gui;

import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;

import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.DefaultListModel;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextField;
import javax.swing.border.TitledBorder;

import psidev.psi.mi.filemakers.xmlMaker.structure.Dictionary;
import psidev.psi.mi.filemakers.xmlMaker.structure.DictionaryContainer;
import psidev.psi.mi.filemakers.xsd.Utils;

/**
 * Displays the list of dictionaries and lets the user load, edit, or select them.
 *
 * @author Arnaud Ceol, University of Rome "Tor Vergata", Mint group,
 *         arnaud.ceol@gmail.com
 */
public class DictionaryPanel extends JPanel {

	// Backing model for the dictionary JList below.
	public DefaultListModel listModel = new DefaultListModel();

	// Read-only display of the selected dictionary's separator.
	public JTextField separatorLbl = new JTextField(3);

	// Toggles case sensitivity of the selected dictionary.
	JCheckBox caseSensitiveb = new JCheckBox("case sensitive");

	/**
	 * Edits the selected dictionary: re-opens a file chooser (with separator and
	 * case-sensitivity fields as accessory) pre-filled from the current dictionary,
	 * and replaces the list entry with the newly loaded dictionary.
	 */
	// NOTE(review): File.toURL() is deprecated (File.toURI().toURL() is the replacement),
	// and the local `fileName` field is never used — worth cleaning up.
	public void editDictionnary() throws IOException {
		if (list.getSelectedIndex() == -1) {
			JOptionPane.showMessageDialog(new JFrame(),
					"No dictionnary selected", "[PSI makers: PSI maker]",
					JOptionPane.ERROR_MESSAGE);
			return;
		}

		Dictionary curentDictionnary = (Dictionary) list.getSelectedValue();

		JTextField separator = new JTextField(curentDictionnary.getSeparator());
		JTextField fileName = new JTextField(curentDictionnary.getFileURL()
				.getPath());
		JCheckBox caseSensitive = new JCheckBox();
		caseSensitive.setSelected(curentDictionnary.isCaseSensitive());

		try {
			Box panel = new Box(BoxLayout.Y_AXIS);
			// Start browsing in the last dictionary directory when known.
			String defaultDirectory = Utils.lastVisitedDirectory;
			if (Utils.lastVisitedDictionaryDirectory != null)
				defaultDirectory = Utils.lastVisitedDictionaryDirectory;

			JFileChooser fc = new JFileChooser(defaultDirectory);
			fc.setSelectedFile(new File(curentDictionnary.getFileURL()
					.getPath()));

			panel.add(new JLabel("Separator"));
			panel.add(separator);
			panel.add(new JLabel("Case sensitive"));
			panel.add(caseSensitive);
			fc.setAccessory(panel);

			int returnVal = fc.showOpenDialog(new JFrame());
			if (returnVal != JFileChooser.APPROVE_OPTION) {
				return;
			}

			URL url = fc.getSelectedFile().toURL();

			Dictionary newDico = new Dictionary(url, separator.getText(),
					caseSensitive.isSelected());
			// Replace the edited entry in place.
			listModel.setElementAt(newDico, list.getSelectedIndex());
			caseSensitiveb.setSelected(newDico.isCaseSensitive());
			separatorLbl.setText(newDico.getSeparator());
		} catch (FileNotFoundException fe) {
			JOptionPane.showMessageDialog(new JFrame(), "Unable to load file",
					"[PSI makers: PSI maker] load dictionnary",
					JOptionPane.ERROR_MESSAGE);
		} catch (NullPointerException npe) {
			JOptionPane.showMessageDialog(new JFrame(), "Unable to load file",
					"[PSI makers: PSI maker] load dictionnary",
					JOptionPane.ERROR_MESSAGE);
		}
	}

	/**
	 * List of loaded dictionaries, backed by {@link #listModel}.
	 *
	 * @uml.property name="list"
	 * @uml.associationEnd
	 * @uml.property name="list" multiplicity="(0 -1)"
	 *               elementType="mint.filemakers.xmlMaker.structure.Dictionary"
	 */
	public JList list = new JList(listModel);

	/**
	 * Shared container holding the dictionary structures.
	 *
	 * @uml.property name="dictionaries"
	 * @uml.associationEnd
	 * @uml.property name="dictionaries" multiplicity="(1 1)"
	 */
	public DictionaryContainer dictionaries;

	/**
	 * Builds the panel: a scrollable dictionary list in the center and two rows of
	 * buttons (New/Edit/View, Separator/case-sensitive) at the bottom. Clicking a
	 * list entry refreshes the separator and case-sensitivity widgets.
	 */
	public DictionaryPanel(DictionaryContainer dictionaries) {
		super(new BorderLayout());
		separatorLbl.setEditable(false);
		this.dictionaries = dictionaries;

		// Sync the separator/case-sensitive widgets with the clicked dictionary.
		MouseListener mouseListener = new MouseAdapter() {
			public void mouseClicked(MouseEvent e) {
				Dictionary curentDictionnary = (Dictionary) list
						.getSelectedValue();
				if (curentDictionnary != null) {
					caseSensitiveb.setSelected(curentDictionnary.caseSensitive);
					separatorLbl.setText(curentDictionnary.getSeparator());
				}
			}
		};
		list.addMouseListener(mouseListener);

		Box buttonsPanel = new Box(BoxLayout.Y_AXIS);
		buttonsPanel.setBorder(new TitledBorder(""));
		Box line1Panel = new Box(BoxLayout.X_AXIS);
		line1Panel.setBorder(new TitledBorder(""));
		Box line2Panel = new Box(BoxLayout.X_AXIS);
		line2Panel.setBorder(new TitledBorder(""));

		JButton newDicob = new JButton("New");
		Utils.setDefaultSize(newDicob);
		newDicob.addActionListener(new addDictionnaryListener());
		line1Panel.add(newDicob);

		JButton loadDicob = new JButton("Edit");
		Utils.setDefaultSize(loadDicob);
		loadDicob.addActionListener(new editDictionnaryListener());
		line1Panel.add(loadDicob);

		JButton displayDicob = new JButton("View");
		Utils.setDefaultSize(displayDicob);
		displayDicob.addActionListener(new displayALineListener());
		line1Panel.add(displayDicob);

		JButton separatorb = new JButton("Separator");
		Utils.setDefaultSize(separatorb);
		separatorb.addActionListener(new separatorListener());
		line2Panel.add(separatorb);
		line2Panel.add(separatorLbl);

		caseSensitiveb.addItemListener(new caseSensitiveListener());
		line2Panel.add(caseSensitiveb);

		buttonsPanel.add(line1Panel);
		buttonsPanel.add(line2Panel);

		list.setFixedCellHeight(10);
		list.setFixedCellWidth(60);
		list.setLayoutOrientation(JList.VERTICAL);
		list.setAutoscrolls(true);
		list.setVisible(true);
		listModel.removeAllElements();

		JScrollPane scrollList = new JScrollPane(list);
		add(scrollList, BorderLayout.CENTER);
		add(buttonsPanel, BorderLayout.SOUTH);
	}

	/**
	 * @return index of the dictionary currently selected in the list (-1 if none)
	 */
	public int getSelectedDictionnary() {
		return list.getSelectedIndex();
	}

	/**
	 * @return example values from the currently selected dictionary
	 */
	public String[] getExampleList() {
		return ((Dictionary) listModel.elementAt(list.getSelectedIndex()))
				.exampleList();
	}

	/**
	 * "Edit" button handler: delegates to {@link #editDictionnary()} and reports I/O failures.
	 */
	public class editDictionnaryListener implements ActionListener {
		public void actionPerformed(ActionEvent e) {
			try {
				editDictionnary();
			} catch (IOException urie) {
				JOptionPane.showMessageDialog(new JFrame(),
						"Unable to load file",
						"[PSI makers: PSI maker] load flat file",
						JOptionPane.ERROR_MESSAGE);
			}
		}
	}

	/**
	 * Handler that removes the selected dictionary from the list.
	 */
	public class removeDictionnaryListener implements ActionListener {
		public void actionPerformed(ActionEvent e) {
			removeDictionnary();
		}
	}

	/**
	 * "New" button handler: loads a dictionary, appends it, and selects it.
	 */
	public class addDictionnaryListener implements ActionListener {
		public void actionPerformed(ActionEvent e) {
			try {
				addDictionnary();
				list.setSelectedIndex(list.getLastVisibleIndex());
			} catch (IOException urie) {
				JOptionPane.showMessageDialog(new JFrame(),
						"Unable to load file",
						"[PSI makers: PSI maker] load flat file",
						JOptionPane.ERROR_MESSAGE);
			}
		}
	}

	/**
	 * "Separator" button handler: edits the selected dictionary's separator.
	 */
	public class separatorListener implements ActionListener {
		public void actionPerformed(ActionEvent e) {
			editSeparator();
		}
	}

	/**
	 * Checkbox handler: applies the case-sensitivity toggle to the selected dictionary.
	 */
	public class caseSensitiveListener implements ItemListener {
		public void itemStateChanged(ItemEvent e) {
			setCaseSensitive();
		}
	}

	/**
	 * "View" button handler: displays an example line of the selected dictionary.
	 */
	public class displayALineListener implements ActionListener {
		public void actionPerformed(ActionEvent e) {
			displayALine();
		}
	}

	/**
	 * Loads a new dictionary from a file chosen in a file chooser (with separator
	 * and case-sensitivity fields as accessory) and adds it to both the list model
	 * and the shared {@link DictionaryContainer}.
	 */
	// NOTE(review): `fileName` is unused, `dico != null` is always true after `new`,
	// and File.toURL() is deprecated — candidates for cleanup.
	public void addDictionnary() throws IOException {
		JTextField separator = new JTextField();
		JTextField fileName = new JTextField();
		JCheckBox caseSensitive = new JCheckBox();

		try {
			Box panel = new Box(BoxLayout.Y_AXIS);
			String defaultDirectory = Utils.lastVisitedDirectory;
			if (Utils.lastVisitedDictionaryDirectory != null)
				defaultDirectory = Utils.lastVisitedDictionaryDirectory;

			JFileChooser fc = new JFileChooser(defaultDirectory);

			panel.add(new JLabel("Separator"));
			panel.add(separator);
			panel.add(new JLabel("Case sensitive"));
			panel.add(caseSensitive);
			fc.setAccessory(panel);

			int returnVal = fc.showOpenDialog(new JFrame());
			if (returnVal != JFileChooser.APPROVE_OPTION) {
				return;
			}

			URL url = fc.getSelectedFile().toURL();

			Dictionary dico = new Dictionary(url, separator.getText(),
					caseSensitive.isSelected());

			if (dico != null) {
				dico.index = listModel.getSize();
				listModel.addElement(dico);
				/* add in the structure */
				dictionaries.addDictionary(dico);
			}
			caseSensitiveb.setSelected(dico.isCaseSensitive());
			separatorLbl.setText(dico.getSeparator());
		} catch (FileNotFoundException fe) {
			JOptionPane.showMessageDialog(new JFrame(), "Unable to load file",
					"[PSI makers: PSI maker] load dictionnary",
					JOptionPane.ERROR_MESSAGE);
		} catch (NullPointerException npe) {
			JOptionPane.showMessageDialog(new JFrame(), "Unable to load file",
					"[PSI makers: PSI maker] load dictionnary",
					JOptionPane.ERROR_MESSAGE);
		}
	}

	/**
	 * Removes the selected dictionary from the list after user confirmation.
	 */
	// NOTE(review): this inserts the String "empty" into a model whose elements are
	// elsewhere cast to Dictionary (see getExampleList/reload) — a later selection of
	// this slot will throw ClassCastException. The unused separator/fileName/caseSensitive
	// locals also look like copy-paste leftovers. Confirm intended behavior before fixing.
	public void removeDictionnary() {
		JTextField separator = new JTextField();
		JTextField fileName = new JTextField();
		JCheckBox caseSensitive = new JCheckBox();

		if (list.getSelectedIndex() == -1) {
			JOptionPane.showMessageDialog(new JFrame(),
					"No dictionnary selected", "[PSI makers: PSI maker]",
					JOptionPane.ERROR_MESSAGE);
			return;
		}

		/* ask for confirmation */
		int confirm = JOptionPane
				.showConfirmDialog(
						new JFrame(),
						"All associations done to this dictionnary will be lost. Do you want to continue?",
						"Associatation of a dictionnary",
						JOptionPane.YES_NO_OPTION);

		if (confirm != JOptionPane.YES_OPTION) {
			return;
		}

		listModel.setElementAt("empty", list.getSelectedIndex());
	}

	/**
	 * Prompts for a new separator (regular expression) and applies it to the
	 * selected dictionary.
	 */
	public void editSeparator() {
		if (list.getSelectedIndex() == -1) {
			JOptionPane.showMessageDialog(new JFrame(),
					"No dictionnary selected", "[PSI makers: PSI maker]",
					JOptionPane.ERROR_MESSAGE);
			return;
		}

		Dictionary curentDictionnary = (Dictionary) list.getSelectedValue();
		JTextField separator = new JTextField(curentDictionnary.getSeparator());

		try {
			String s = (String) JOptionPane.showInputDialog(new JFrame(
					"[PSI makers: PSI maker] Flat File"),
					"Line Separator (use regular expression, e.g.: \\| \n",
					"\\|");
			if (s != null) {
				curentDictionnary.setSeparator(s);
				separatorLbl.setText(s);
			}
		} catch (NullPointerException npe) {
			JOptionPane.showMessageDialog(new JFrame(), "Unable to load file",
					"[PSI makers: PSI maker] load dictionnary",
					JOptionPane.ERROR_MESSAGE);
		}
	}

	/**
	 * Applies the case-sensitivity checkbox state to the selected dictionary.
	 */
	public void setCaseSensitive() {
		if (list.getSelectedIndex() == -1) {
			JOptionPane.showMessageDialog(new JFrame(),
					"No dictionnary selected", "[PSI makers: PSI maker]",
					JOptionPane.ERROR_MESSAGE);
			return;
		}

		Dictionary curentDictionnary = (Dictionary) list.getSelectedValue();
		curentDictionnary.setCaseSensitive(caseSensitiveb.isSelected());
	}

	/**
	 * Displays, in a new frame, the data found in the dictionary's example line
	 * split by its separator.
	 */
	// NOTE(review): JFrame.show() is deprecated in favor of setVisible(true).
	public void displayALine() {
		if (list.getSelectedIndex() == -1) {
			JOptionPane.showMessageDialog(new JFrame(),
					"No dictionnary selected", "[PSI makers: PSI maker]",
					JOptionPane.ERROR_MESSAGE);
			return;
		}

		String exampleLine = ((Dictionary) list.getSelectedValue())
				.exampleLine();
		String separator = ((Dictionary) list.getSelectedValue())
				.getSeparator();

		JList exampleList = new JList((exampleLine + " ").split(separator));
		JScrollPane scrollList = new JScrollPane(exampleList);

		JFrame frame = new JFrame();
		Box box = new Box(BoxLayout.Y_AXIS);
		box.add(new JLabel("example line: " + exampleLine));
		box.add(new JLabel("separator: " + separator + ", case sensitive: "
				+ ((Dictionary) list.getSelectedValue()).isCaseSensitive()));
		box.add(scrollList);
		frame.getContentPane().add(box);
		frame.setTitle("dictionnary: " + list.getSelectedValue());
		// frame.setSize(600, 300);
		frame.pack();
		frame.show();
	}

	/**
	 * Rebuilds the list model from the shared dictionary container.
	 */
	public void reload() {
		listModel.clear();
		for (int i = 0; i < dictionaries.dictionaries.size(); i++) {
			listModel
					.addElement(((Dictionary) dictionaries.dictionaries.get(i)));
		}
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.falcon.regression.hcat; import org.apache.falcon.entity.v0.EntityType; import org.apache.falcon.entity.v0.Frequency; import org.apache.falcon.entity.v0.cluster.Interfacetype; import org.apache.falcon.entity.v0.feed.ActionType; import org.apache.falcon.entity.v0.feed.ClusterType; import org.apache.falcon.regression.Entities.FeedMerlin; import org.apache.falcon.regression.core.bundle.Bundle; import org.apache.falcon.regression.core.enumsAndConstants.FreqType; import org.apache.falcon.regression.core.helpers.ColoHelper; import org.apache.falcon.regression.core.response.ServiceResponse; import org.apache.falcon.regression.core.util.AssertUtil; import org.apache.falcon.regression.core.util.BundleUtil; import org.apache.falcon.regression.core.util.HCatUtil; import org.apache.falcon.regression.core.util.InstanceUtil; import org.apache.falcon.regression.core.util.OSUtil; import org.apache.falcon.regression.core.util.OozieUtil; import org.apache.falcon.regression.core.util.Util; import org.apache.falcon.regression.testHelper.BaseTestClass; import org.apache.hive.hcatalog.api.HCatClient; import org.apache.hive.hcatalog.api.HCatCreateTableDesc; import 
org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.oozie.client.Job;
import org.apache.oozie.client.OozieClient;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Tests for operations with an HCat-backed feed (submit, delete, replicate,
 * suspend/resume). All scenarios run against live Falcon/Oozie/HCatalog services.
 */
@Test(groups = "embedded")
public class HCatFeedOperationsTest extends BaseTestClass {

    // First (source) colo: Falcon helper, Oozie client and HCatalog client.
    private ColoHelper cluster = servers.get(0);
    private OozieClient clusterOC = serverOC.get(0);
    private HCatClient clusterHC;

    // Second (target) colo used by the replication scenarios.
    private ColoHelper cluster2 = servers.get(1);
    private OozieClient cluster2OC = serverOC.get(1);
    private HCatClient cluster2HC;

    private String dbName = "default";
    // Table created on BOTH clusters in createTestData().
    private String tableName = "hcatFeedOperationsTest";
    // Table created ONLY on the source cluster, used to provoke failures.
    private String randomTblName = "randomTable_HcatFeedOperationsTest";
    // Feed XML under test; set by each test method and reused by the
    // suspend/resume and delete tests after they re-run the scheduling test.
    private String feed;
    private String aggregateWorkflowDir = cleanAndGetTestDir() + "/aggregator";

    /**
     * Uploads workflow resources and creates the HCat tables the tests rely on.
     *
     * @throws Exception on any setup failure
     */
    @BeforeClass(alwaysRun = true)
    public void createTestData() throws Exception {
        uploadDirToClusters(aggregateWorkflowDir, OSUtil.RESOURCES_OOZIE);
        clusterHC = cluster.getClusterHelper().getHCatClient();
        cluster2HC = cluster2.getClusterHelper().getHCatClient();

        //create an empty table for feed operations
        ArrayList<HCatFieldSchema> partitions = new ArrayList<>();
        partitions.add(HCatUtil.getStringSchema("year", "yearPartition"));
        createEmptyTable(clusterHC, dbName, tableName, partitions);

        //A random table to test submission of replication feed when table doesn't exist on target
        createEmptyTable(clusterHC, dbName, randomTblName, partitions);

        //create empty table on target cluster
        createEmptyTable(cluster2HC, dbName, tableName, new ArrayList<HCatFieldSchema>());
    }

    /**
     * Reads the HCat bundle and prepares one uniquely-named bundle per cluster,
     * pointing each bundle's REGISTRY interface at that cluster's HCat endpoint.
     *
     * @throws Exception on bundle-preparation failure
     */
    @BeforeMethod(alwaysRun = true)
    public void setUp() throws Exception {
        Bundle bundle = BundleUtil.readHCatBundle();

        bundles[0] = new Bundle(bundle, cluster.getPrefix());
        bundles[0].generateUniqueBundle(this);
        bundles[0].setClusterInterface(Interfacetype.REGISTRY,
                cluster.getClusterHelper().getHCatEndpoint());

        bundles[1] = new Bundle(bundle, cluster2.getPrefix());
        bundles[1].generateUniqueBundle(this);
        bundles[1].setClusterInterface(Interfacetype.REGISTRY,
                cluster2.getClusterHelper().getHCatEndpoint());
    }

    /** Cleans up the entities submitted by the current test method. */
    @AfterMethod(alwaysRun = true)
    public void tearDown() throws HCatException {
        removeTestClassEntities();
    }

    /** Drops the test tables created in {@link #createTestData()}. */
    @AfterClass(alwaysRun = true)
    public void tearDownClass() throws IOException {
        clusterHC.dropTable(dbName, tableName, true);
        clusterHC.dropTable(dbName, randomTblName, true);
        cluster2HC.dropTable(dbName, tableName, true);
    }

    /**
     * Submit Hcat feed when Hcat table mentioned in table uri does not exist.
     * Response should reflect failure.
     *
     * @throws Exception
     */
    @Test(groups = {"singleCluster"})
    public void submitFeedWhenTableDoesNotExist() throws Exception {
        Bundle.submitCluster(bundles[1]);
        feed = bundles[1].getInputFeedFromBundle();
        FeedMerlin feedObj = new FeedMerlin(feed);
        // Point the feed at a table that createTestData() never created on cluster 2.
        feedObj.setTableValue(dbName, randomTblName, FreqType.YEARLY.getHcatPathValue());
        ServiceResponse response = prism.getFeedHelper().submitEntity(feedObj.toString());
        AssertUtil.assertFailed(response);
    }

    /**
     * Submit Hcat feed when Hcat table mentioned in table uri exists. Delete that
     * feed, and re-submit. All responses should reflect success.
     *
     * @throws Exception
     */
    @Test(groups = {"singleCluster"})
    public void submitFeedPostDeletionWhenTableExists() throws Exception {
        Bundle.submitCluster(bundles[0]);
        feed = bundles[0].getInputFeedFromBundle();
        FeedMerlin feedObj = new FeedMerlin(feed);
        feedObj.setTableValue(dbName, tableName, FreqType.YEARLY.getHcatPathValue());
        ServiceResponse response = prism.getFeedHelper().submitEntity(feedObj.toString());
        AssertUtil.assertSucceeded(response);

        response = prism.getFeedHelper().delete(feedObj.toString());
        AssertUtil.assertSucceeded(response);

        response = prism.getFeedHelper().submitEntity(feedObj.toString());
        AssertUtil.assertSucceeded(response);
    }

    /**
     * Submit Hcat Replication feed when Hcat table mentioned in table uri does not
     * exist on target. The response is Partial, with a successful submit/schedule
     * on source.
     *
     * @throws Exception
     */
    @Test
    public void submitAndScheduleReplicationFeedWhenTableDoesNotExistOnTarget() throws Exception {
        Bundle.submitCluster(bundles[0], bundles[1]);
        final String startDate = "2010-01-01T20:00Z";
        final String endDate = "2099-01-01T00:00Z";
        String tableUri = "catalog:" + dbName + ":" + randomTblName + "#year=${YEAR}";
        bundles[0].setInputFeedPeriodicity(1, Frequency.TimeUnit.hours);
        bundles[0].setInputFeedValidity(startDate, endDate);
        bundles[0].setInputFeedTableUri(tableUri);

        feed = bundles[0].getDataSets().get(0);
        // set cluster 2 as the target.
        feed = FeedMerlin.fromString(feed).addFeedCluster(
                new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
                        .withRetention("months(9000)", ActionType.DELETE)
                        .withValidity(startDate, endDate)
                        .withClusterType(ClusterType.TARGET)
                        .withTableUri(tableUri)
                        .build()).toString();
        AssertUtil.assertPartial(prism.getFeedHelper().submitAndSchedule(feed));
    }

    /**
     * Submit Hcat Replication feed when Hcat table mentioned in table uri exists on
     * both source and target. The response is succeeded, and a replication
     * coordinator should appear on target oozie. The test however does not ensure
     * that replication goes through.
     *
     * @throws Exception
     */
    @Test
    public void submitAndScheduleReplicationFeedWhenTableExistsOnSourceAndTarget() throws Exception {
        Bundle.submitCluster(bundles[0], bundles[1]);
        final String startDate = "2010-01-01T20:00Z";
        final String endDate = "2099-01-01T00:00Z";
        String tableUri = "catalog:" + dbName + ":" + tableName + "#year=${YEAR}";
        bundles[0].setInputFeedPeriodicity(1, Frequency.TimeUnit.hours);
        bundles[0].setInputFeedValidity(startDate, endDate);
        bundles[0].setInputFeedTableUri(tableUri);

        feed = bundles[0].getDataSets().get(0);
        // set cluster 2 as the target.
        feed = FeedMerlin.fromString(feed).addFeedCluster(
                new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[1].getClusters().get(0)))
                        .withRetention("months(9000)", ActionType.DELETE)
                        .withValidity(startDate, endDate)
                        .withClusterType(ClusterType.TARGET)
                        .withTableUri(tableUri)
                        .build()).toString();
        AssertUtil.assertSucceeded(prism.getFeedHelper().submitAndSchedule(feed));
        InstanceUtil.waitTillInstancesAreCreated(cluster2OC, feed, 0);
        Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(cluster2OC,
                Util.readEntityName(feed), "REPLICATION"), 1);
        //This test doesn't wait for replication to succeed.
    }

    /**
     * Submit Hcat Replication feed. Suspend the feed, and check that feed was
     * suspended on both clusters. Now resume feed, and check that status is running
     * on both clusters. The test however does not ensure that replication goes
     * through.
     *
     * @throws Exception
     */
    @Test
    public void suspendAndResumeReplicationFeed() throws Exception {
        // Reuses the scheduling test to get a running replication feed into {@code feed}.
        submitAndScheduleReplicationFeedWhenTableExistsOnSourceAndTarget();

        AssertUtil.assertSucceeded(prism.getFeedHelper().suspend(feed));

        //check that feed suspended on both clusters
        AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.SUSPENDED);
        AssertUtil.checkStatus(cluster2OC, EntityType.FEED, feed, Job.Status.SUSPENDED);

        AssertUtil.assertSucceeded(prism.getFeedHelper().resume(feed));

        AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.RUNNING);
        AssertUtil.checkStatus(cluster2OC, EntityType.FEED, feed, Job.Status.RUNNING);
    }

    /**
     * Submit Hcat Replication feed. Delete the feed, and check that feed was
     * deleted on both clusters. The test however does not ensure that replication
     * goes through.
     *
     * @throws Exception
     */
    @Test
    public void deleteReplicationFeed() throws Exception {
        // Reuses the scheduling test to get a running replication feed into {@code feed}.
        submitAndScheduleReplicationFeedWhenTableExistsOnSourceAndTarget();

        AssertUtil.assertSucceeded(prism.getFeedHelper().delete(feed));
        AssertUtil.checkStatus(clusterOC, EntityType.FEED, feed, Job.Status.KILLED);
        AssertUtil.checkStatus(cluster2OC, EntityType.FEED, feed, Job.Status.KILLED);
    }

    /**
     * Creates an external, text-format table with a single string "id" column and
     * the given partition columns; a no-op if the table already exists
     * (ifNotExists(true)).
     *
     * @param cli HCat client of the cluster on which to create the table
     * @param dbName database to create the table in
     * @param tabName table name
     * @param partitionCols partition columns (may be empty)
     * @throws HCatException on table-creation failure
     */
    private static void createEmptyTable(HCatClient cli, String dbName, String tabName,
                                         List<HCatFieldSchema> partitionCols) throws HCatException {
        ArrayList<HCatFieldSchema> cols = new ArrayList<>();
        cols.add(HCatUtil.getStringSchema("id", "id comment"));
        HCatCreateTableDesc tableDesc = HCatCreateTableDesc
                .create(dbName, tabName, cols)
                .partCols(partitionCols)
                .fileFormat("textfile")
                .ifNotExists(true)
                .isTableExternal(true)
                .build();
        cli.createTable(tableDesc);
    }
}
/* * Copyright (c) 2012, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.sso.agent.saml; import org.apache.xerces.impl.Constants; import org.apache.xerces.util.SecurityManager; import org.apache.xml.security.signature.XMLSignature; import org.joda.time.DateTime; import org.opensaml.Configuration; import org.opensaml.DefaultBootstrap; import org.opensaml.common.SAMLVersion; import org.opensaml.common.xml.SAMLConstants; import org.opensaml.saml2.common.Extensions; import org.opensaml.saml2.core.*; import org.opensaml.saml2.core.impl.*; import org.opensaml.saml2.ecp.RelayState; import org.opensaml.saml2.encryption.Decrypter; import org.opensaml.xml.ConfigurationException; import org.opensaml.xml.XMLObject; import org.opensaml.xml.encryption.EncryptedKey; import org.opensaml.xml.io.*; import org.opensaml.xml.security.SecurityHelper; import org.opensaml.xml.security.credential.Credential; import org.opensaml.xml.security.keyinfo.KeyInfoCredentialResolver; import org.opensaml.xml.security.keyinfo.StaticKeyInfoCredentialResolver; import org.opensaml.xml.signature.SignatureValidator; import org.opensaml.xml.util.Base64; import org.opensaml.xml.util.XMLHelper; import org.opensaml.xml.validation.ValidationException; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.bootstrap.DOMImplementationRegistry; import org.w3c.dom.ls.DOMImplementationLS; 
import org.w3c.dom.ls.LSOutput; import org.w3c.dom.ls.LSSerializer; import org.wso2.carbon.identity.sso.agent.SSOAgentConstants; import org.wso2.carbon.identity.sso.agent.SSOAgentException; import org.wso2.carbon.identity.sso.agent.bean.LoggedInSessionBean; import org.wso2.carbon.identity.sso.agent.bean.SSOAgentConfig; import org.wso2.carbon.identity.sso.agent.util.CarbonEntityResolver; import org.wso2.carbon.identity.sso.agent.util.SAMLSignatureValidator; import org.wso2.carbon.identity.sso.agent.util.SSOAgentUtils; import org.xml.sax.SAXException; import javax.crypto.SecretKey; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.xml.XMLConstants; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import java.io.*; import java.net.URLEncoder; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import java.util.zip.Deflater; import java.util.zip.DeflaterOutputStream; /** * TODO: Need to have mechanism to map SP initiated SAML2 Request to SAML2 Responses and validate. 
* TODO: Still however IdP initiated SSO also should be possible through configuration */ public class SAML2SSOManager { private static final String SECURITY_MANAGER_PROPERTY = Constants.XERCES_PROPERTY_PREFIX + Constants.SECURITY_MANAGER_PROPERTY; private static final int ENTITY_EXPANSION_LIMIT = 0; private static Logger LOGGER = Logger.getLogger(SSOAgentConstants.LOGGER_NAME); private static volatile boolean bootStrapped = false; private SSOAgentConfig ssoAgentConfig = null; private static Object signatureValidator = null; public SAML2SSOManager(SSOAgentConfig ssoAgentConfig) throws SSOAgentException { /* Initializing the OpenSAML library, loading default configurations */ this.ssoAgentConfig = ssoAgentConfig; //load custom Signature Validator Class String signerClassName = ssoAgentConfig.getSAML2().getSignatureValidatorImplClass(); try { if (signerClassName != null) { signatureValidator = Class.forName(signerClassName).newInstance(); } } catch (ClassNotFoundException e) { throw new SSOAgentException("Error loading custom signature validator class", e); } catch (IllegalAccessException e) { throw new SSOAgentException("Error loading custom signature validator class", e); } catch (InstantiationException e) { throw new SSOAgentException("Error loading custom signature validator class", e); } try { if (!bootStrapped) { synchronized (this) { if (!bootStrapped) { DefaultBootstrap.bootstrap(); bootStrapped = true; } } } } catch (ConfigurationException e) { throw new SSOAgentException("Error while bootstrapping OpenSAML library", e); } } /** * Returns the redirection URL with the appended SAML2 * Request message * * @param request SAML 2 request * @return redirectionUrl */ public String buildRedirectRequest(HttpServletRequest request, boolean isLogout) throws SSOAgentException { RequestAbstractType requestMessage = null; if (!isLogout) { requestMessage = buildAuthnRequest(request); } else { LoggedInSessionBean sessionBean = (LoggedInSessionBean) request.getSession(false). 
getAttribute(SSOAgentConstants.SESSION_BEAN_NAME); if (sessionBean != null) { requestMessage = buildLogoutRequest(sessionBean.getSAML2SSO().getSubjectId(), sessionBean.getSAML2SSO().getSessionIndex()); } else { throw new SSOAgentException("SLO Request can not be built. SSO Session is NULL"); } } String idpUrl = null; String encodedRequestMessage = encodeRequestMessage( requestMessage, SAMLConstants.SAML2_REDIRECT_BINDING_URI); StringBuilder httpQueryString = new StringBuilder( SSOAgentConstants.SAML2SSO.HTTP_POST_PARAM_SAML2_AUTH_REQ + "=" + encodedRequestMessage); String relayState = ssoAgentConfig.getSAML2().getRelayState(); if (relayState != null) { try { httpQueryString.append("&" + RelayState.DEFAULT_ELEMENT_LOCAL_NAME + "=" + URLEncoder.encode(relayState, "UTF-8").trim()); } catch (UnsupportedEncodingException e) { throw new SSOAgentException("Error occurred while URLEncoding " + RelayState.DEFAULT_ELEMENT_LOCAL_NAME, e); } } if (ssoAgentConfig.getQueryParams() != null && !ssoAgentConfig.getQueryParams().isEmpty()) { StringBuilder builder = new StringBuilder(); for (Map.Entry<String, String[]> entry : ssoAgentConfig.getQueryParams().entrySet()) { if (entry.getKey() != null && entry.getValue() != null && entry.getValue().length > 0) { for (String param : entry.getValue()) { builder.append("&").append(entry.getKey()).append("=").append(param); } } } httpQueryString.append(builder); } if (ssoAgentConfig.getSAML2().isRequestSigned()) { SSOAgentUtils.addDeflateSignatureToHTTPQueryString(httpQueryString, new X509CredentialImpl(ssoAgentConfig.getSAML2().getSSOAgentX509Credential())); } if (ssoAgentConfig.getSAML2().getIdPURL().indexOf("?") > -1) { idpUrl = ssoAgentConfig.getSAML2().getIdPURL().concat("&").concat(httpQueryString.toString()); } else { idpUrl = ssoAgentConfig.getSAML2().getIdPURL().concat("?").concat(httpQueryString.toString()); } return idpUrl; } /** * Handles the request for http post binding * * @param request The HTTP request with SAML2 message * 
@param response The HTTP response * @param isLogout Whether the request is a logout request * @throws SSOAgentException */ public String buildPostRequest(HttpServletRequest request, HttpServletResponse response, boolean isLogout) throws SSOAgentException { RequestAbstractType requestMessage = null; if (!isLogout) { requestMessage = buildAuthnRequest(request); if (ssoAgentConfig.getSAML2().isRequestSigned()) { requestMessage = SSOAgentUtils.setSignature((AuthnRequest) requestMessage, XMLSignature.ALGO_ID_SIGNATURE_RSA, new X509CredentialImpl(ssoAgentConfig.getSAML2().getSSOAgentX509Credential())); } } else { LoggedInSessionBean sessionBean = (LoggedInSessionBean) request.getSession(false). getAttribute(SSOAgentConstants.SESSION_BEAN_NAME); if (sessionBean != null) { requestMessage = buildLogoutRequest(sessionBean.getSAML2SSO() .getSubjectId(), sessionBean.getSAML2SSO().getSessionIndex()); if (ssoAgentConfig.getSAML2().isRequestSigned()) { requestMessage = SSOAgentUtils.setSignature((LogoutRequest) requestMessage, XMLSignature.ALGO_ID_SIGNATURE_RSA, new X509CredentialImpl(ssoAgentConfig.getSAML2().getSSOAgentX509Credential())); } } else { throw new SSOAgentException("SLO Request can not be built. 
SSO Session is null"); } } String encodedRequestMessage = encodeRequestMessage(requestMessage, SAMLConstants.SAML2_POST_BINDING_URI); Map<String, String[]> paramsMap = new HashMap<String, String[]>(); paramsMap.put(SSOAgentConstants.SAML2SSO.HTTP_POST_PARAM_SAML2_AUTH_REQ, new String[]{encodedRequestMessage}); if (ssoAgentConfig.getSAML2().getRelayState() != null) { paramsMap.put(RelayState.DEFAULT_ELEMENT_LOCAL_NAME, new String[]{ssoAgentConfig.getSAML2().getRelayState()}); } //Add any additional parameters defined if (ssoAgentConfig.getQueryParams() != null && !ssoAgentConfig.getQueryParams().isEmpty()) { paramsMap.putAll(ssoAgentConfig.getQueryParams()); } StringBuilder htmlParams = new StringBuilder(); for (Map.Entry<String, String[]> entry : paramsMap.entrySet()) { if (entry.getKey() != null && entry.getValue() != null && entry.getValue().length > 0) { for (String param : entry.getValue()) { htmlParams.append("<input type='hidden' name='").append(entry.getKey()) .append("' value='").append(param).append("'>\n"); } } } String htmlPayload = ssoAgentConfig.getSAML2().getPostBindingRequestHTMLPayload(); if (htmlPayload == null || !htmlPayload.contains("<!--$saml_params-->")) { htmlPayload = "<html>\n" + "<body>\n" + "<p>You are now redirected back to " + ssoAgentConfig.getSAML2().getIdPURL() + " \n" + "If the redirection fails, please click the post button.</p>\n" + "<form method='post' action='" + ssoAgentConfig.getSAML2().getIdPURL() + "'>\n" + "<p>\n" + htmlParams.toString() + "<button type='submit'>POST</button>\n" + "</p>\n" + "</form>\n" + "<script type='text/javascript'>\n" + "document.forms[0].submit();\n" + "</script>\n" + "</body>\n" + "</html>"; } else { htmlPayload = htmlPayload.replace("<!--$saml_params-->", htmlParams.toString()); } return htmlPayload; } public void processResponse(HttpServletRequest request, HttpServletResponse response) throws SSOAgentException { String saml2SSOResponse = 
request.getParameter(SSOAgentConstants.SAML2SSO.HTTP_POST_PARAM_SAML2_RESP); if (saml2SSOResponse != null) { String decodedResponse = new String(Base64.decode(saml2SSOResponse)); XMLObject samlObject = unmarshall(decodedResponse); if (samlObject instanceof LogoutResponse) { //This is a SAML response for a single logout request from the SP doSLO(request); } else { processSSOResponse(request); } String relayState = request.getParameter(RelayState.DEFAULT_ELEMENT_LOCAL_NAME); if (relayState != null) { if (!relayState.isEmpty() && !"null".equalsIgnoreCase(relayState)) { //additional checks for incompetent IdPs ssoAgentConfig.getSAML2().setRelayState(relayState); } } } else { throw new SSOAgentException("Invalid SAML2 Response. SAML2 Response can not be null."); } } /** * This method handles the logout requests from the IdP * Any request for the defined logout URL is handled here * * @param request * @throws javax.servlet.ServletException * @throws IOException */ public void doSLO(HttpServletRequest request) throws SSOAgentException { XMLObject saml2Object = null; if (request.getParameter(SSOAgentConstants.SAML2SSO.HTTP_POST_PARAM_SAML2_AUTH_REQ) != null) { saml2Object = unmarshall(new String(Base64.decode(request.getParameter( SSOAgentConstants.SAML2SSO.HTTP_POST_PARAM_SAML2_AUTH_REQ)))); } if (saml2Object == null) { saml2Object = unmarshall(new String(Base64.decode(request.getParameter( SSOAgentConstants.SAML2SSO.HTTP_POST_PARAM_SAML2_RESP)))); } if (saml2Object instanceof LogoutRequest) { LogoutRequest logoutRequest = (LogoutRequest) saml2Object; String sessionIndex = logoutRequest.getSessionIndexes().get(0).getSessionIndex(); Set<HttpSession> sessions = SSOAgentSessionManager.invalidateAllSessions(sessionIndex); for (HttpSession session : sessions) { session.invalidate(); } } else if (saml2Object instanceof LogoutResponse) { if (request.getSession(false) != null) { /** * Not invalidating session explicitly since there may be other listeners * still waiting to get 
triggered and at the end of the chain session needs to be * invalidated by the system */ Set<HttpSession> sessions = SSOAgentSessionManager.invalidateAllSessions(request.getSession(false)); for (HttpSession session : sessions) { session.invalidate(); } } } else { throw new SSOAgentException("Invalid SAML2 Single Logout Request/Response"); } } protected void processSSOResponse(HttpServletRequest request) throws SSOAgentException { LoggedInSessionBean sessionBean = new LoggedInSessionBean(); sessionBean.setSAML2SSO(sessionBean.new SAML2SSO()); String saml2ResponseString = new String(Base64.decode(request.getParameter( SSOAgentConstants.SAML2SSO.HTTP_POST_PARAM_SAML2_RESP))); Response saml2Response = (Response) unmarshall(saml2ResponseString); sessionBean.getSAML2SSO().setResponseString(saml2ResponseString); sessionBean.getSAML2SSO().setSAMLResponse(saml2Response); Assertion assertion = null; if (ssoAgentConfig.getSAML2().isAssertionEncrypted()) { List<EncryptedAssertion> encryptedAssertions = saml2Response.getEncryptedAssertions(); EncryptedAssertion encryptedAssertion = null; if (encryptedAssertions != null && encryptedAssertions.size() > 0) { encryptedAssertion = encryptedAssertions.get(0); try { assertion = getDecryptedAssertion(encryptedAssertion); } catch (Exception e) { throw new SSOAgentException("Unable to decrypt the SAML2 Assertion"); } } } else { List<Assertion> assertions = saml2Response.getAssertions(); if (assertions != null && assertions.size() > 0) { assertion = assertions.get(0); } } if (assertion == null) { if (isNoPassive(saml2Response)) { LOGGER.log(Level.FINE, "Cannot authenticate in passive mode"); return; } throw new SSOAgentException("SAML2 Assertion not found in the Response"); } String idPEntityIdValue = assertion.getIssuer().getValue(); if (idPEntityIdValue == null || idPEntityIdValue.isEmpty()) { throw new SSOAgentException("SAML2 Response does not contain an Issuer value"); } else if 
(!idPEntityIdValue.equals(ssoAgentConfig.getSAML2().getIdPEntityId())) { throw new SSOAgentException("SAML2 Response Issuer verification failed"); } sessionBean.getSAML2SSO().setAssertion(assertion); // Cannot marshall SAML assertion here, before signature validation due to a weird issue in OpenSAML // Get the subject name from the Response Object and forward it to login_action.jsp String subject = null; if (assertion.getSubject() != null && assertion.getSubject().getNameID() != null) { subject = assertion.getSubject().getNameID().getValue(); } if (subject == null) { throw new SSOAgentException("SAML2 Response does not contain the name of the subject"); } sessionBean.getSAML2SSO().setSubjectId(subject); // set the subject request.getSession().setAttribute(SSOAgentConstants.SESSION_BEAN_NAME, sessionBean); // validate audience restriction validateAudienceRestriction(assertion); // validate signature validateSignature(saml2Response, assertion); // Marshalling SAML2 assertion after signature validation due to a weird issue in OpenSAML sessionBean.getSAML2SSO().setAssertionString(marshall(assertion)); ((LoggedInSessionBean) request.getSession().getAttribute( SSOAgentConstants.SESSION_BEAN_NAME)).getSAML2SSO(). 
setSubjectAttributes(getAssertionStatements(assertion)); //For removing the session when the single sign out request made by the SP itself if (ssoAgentConfig.getSAML2().isSLOEnabled()) { String sessionId = assertion.getAuthnStatements().get(0).getSessionIndex(); if (sessionId == null) { throw new SSOAgentException("Single Logout is enabled but IdP Session ID not found in SAML2 Assertion"); } ((LoggedInSessionBean) request.getSession().getAttribute( SSOAgentConstants.SESSION_BEAN_NAME)).getSAML2SSO().setSessionIndex(sessionId); SSOAgentSessionManager.addAuthenticatedSession(request.getSession(false)); } request.getSession().setAttribute(SSOAgentConstants.SESSION_BEAN_NAME, sessionBean); } protected LogoutRequest buildLogoutRequest(String user, String sessionIdx) throws SSOAgentException { LogoutRequest logoutReq = new LogoutRequestBuilder().buildObject(); logoutReq.setID(SSOAgentUtils.createID()); logoutReq.setDestination(ssoAgentConfig.getSAML2().getIdPURL()); DateTime issueInstant = new DateTime(); logoutReq.setIssueInstant(issueInstant); logoutReq.setNotOnOrAfter(new DateTime(issueInstant.getMillis() + 5 * 60 * 1000)); IssuerBuilder issuerBuilder = new IssuerBuilder(); Issuer issuer = issuerBuilder.buildObject(); issuer.setValue(ssoAgentConfig.getSAML2().getSPEntityId()); logoutReq.setIssuer(issuer); NameID nameId = new NameIDBuilder().buildObject(); nameId.setFormat("urn:oasis:names:tc:SAML:2.0:nameid-format:entity"); nameId.setValue(user); logoutReq.setNameID(nameId); SessionIndex sessionIndex = new SessionIndexBuilder().buildObject(); sessionIndex.setSessionIndex(sessionIdx); logoutReq.getSessionIndexes().add(sessionIndex); logoutReq.setReason("Single Logout"); return logoutReq; } protected AuthnRequest buildAuthnRequest(HttpServletRequest request) throws SSOAgentException { IssuerBuilder issuerBuilder = new IssuerBuilder(); Issuer issuer = issuerBuilder.buildObject("urn:oasis:names:tc:SAML:2.0:assertion", "Issuer", "samlp"); 
issuer.setValue(ssoAgentConfig.getSAML2().getSPEntityId()); /* NameIDPolicy */ NameIDPolicyBuilder nameIdPolicyBuilder = new NameIDPolicyBuilder(); NameIDPolicy nameIdPolicy = nameIdPolicyBuilder.buildObject(); nameIdPolicy.setFormat("urn:oasis:names:tc:SAML:2.0:nameid-format:persistent"); nameIdPolicy.setSPNameQualifier("Issuer"); nameIdPolicy.setAllowCreate(true); /* AuthnContextClass */ AuthnContextClassRefBuilder authnContextClassRefBuilder = new AuthnContextClassRefBuilder(); AuthnContextClassRef authnContextClassRef = authnContextClassRefBuilder.buildObject("urn:oasis:names:tc:SAML:2.0:assertion", "AuthnContextClassRef", "saml"); authnContextClassRef.setAuthnContextClassRef("urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport"); /* AuthnContex */ RequestedAuthnContextBuilder requestedAuthnContextBuilder = new RequestedAuthnContextBuilder(); RequestedAuthnContext requestedAuthnContext = requestedAuthnContextBuilder.buildObject(); requestedAuthnContext.setComparison(AuthnContextComparisonTypeEnumeration.EXACT); requestedAuthnContext.getAuthnContextClassRefs().add(authnContextClassRef); DateTime issueInstant = new DateTime(); /* Creation of AuthRequestObject */ AuthnRequestBuilder authRequestBuilder = new AuthnRequestBuilder(); AuthnRequest authRequest = authRequestBuilder.buildObject("urn:oasis:names:tc:SAML:2.0:protocol", "AuthnRequest", "samlp"); authRequest.setForceAuthn(ssoAgentConfig.getSAML2().isForceAuthn()); authRequest.setIsPassive(ssoAgentConfig.getSAML2().isPassiveAuthn()); authRequest.setIssueInstant(issueInstant); authRequest.setProtocolBinding(ssoAgentConfig.getSAML2().getHttpBinding()); authRequest.setAssertionConsumerServiceURL(ssoAgentConfig.getSAML2().getACSURL()); authRequest.setIssuer(issuer); authRequest.setNameIDPolicy(nameIdPolicy); authRequest.setRequestedAuthnContext(requestedAuthnContext); authRequest.setID(SSOAgentUtils.createID()); authRequest.setVersion(SAMLVersion.VERSION_20); 
authRequest.setDestination(ssoAgentConfig.getSAML2().getIdPURL()); if (request.getAttribute(Extensions.LOCAL_NAME) != null) { authRequest.setExtensions((Extensions) request.getAttribute(Extensions.LOCAL_NAME)); } /* Requesting Attributes. This Index value is registered in the IDP */ if (ssoAgentConfig.getSAML2().getAttributeConsumingServiceIndex() != null && ssoAgentConfig.getSAML2().getAttributeConsumingServiceIndex().trim().length() > 0) { authRequest.setAttributeConsumingServiceIndex(Integer.parseInt( ssoAgentConfig.getSAML2().getAttributeConsumingServiceIndex())); } return authRequest; } protected String encodeRequestMessage(RequestAbstractType requestMessage, String binding) throws SSOAgentException { Marshaller marshaller = Configuration.getMarshallerFactory().getMarshaller(requestMessage); Element authDOM = null; try { authDOM = marshaller.marshall(requestMessage); StringWriter rspWrt = new StringWriter(); XMLHelper.writeNode(authDOM, rspWrt); if (SAMLConstants.SAML2_REDIRECT_BINDING_URI.equals(binding)) { //Compress the message, Base 64 encode and URL encode Deflater deflater = new Deflater(Deflater.DEFLATED, true); ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); DeflaterOutputStream deflaterOutputStream = new DeflaterOutputStream (byteArrayOutputStream, deflater); deflaterOutputStream.write(rspWrt.toString().getBytes()); deflaterOutputStream.close(); String encodedRequestMessage = Base64.encodeBytes(byteArrayOutputStream .toByteArray(), Base64.DONT_BREAK_LINES); return URLEncoder.encode(encodedRequestMessage, "UTF-8").trim(); } else if (SAMLConstants.SAML2_POST_BINDING_URI.equals(binding)) { return Base64.encodeBytes(rspWrt.toString().getBytes(), Base64.DONT_BREAK_LINES); } else { LOGGER.log(Level.FINE, "Unsupported SAML2 HTTP Binding. 
Defaulting to " + SAMLConstants.SAML2_POST_BINDING_URI); return Base64.encodeBytes(rspWrt.toString().getBytes(), Base64.DONT_BREAK_LINES); } } catch (MarshallingException e) { throw new SSOAgentException("Error occurred while encoding SAML2 request", e); } catch (UnsupportedEncodingException e) { throw new SSOAgentException("Error occurred while encoding SAML2 request", e); } catch (IOException e) { throw new SSOAgentException("Error occurred while encoding SAML2 request", e); } } protected XMLObject unmarshall(String saml2SSOString) throws SSOAgentException { try { String decodedString = decodeHTMLCharacters(saml2SSOString); DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); documentBuilderFactory.setNamespaceAware(true); documentBuilderFactory.setExpandEntityReferences(false); documentBuilderFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); SecurityManager securityManager = new SecurityManager(); securityManager.setEntityExpansionLimit(ENTITY_EXPANSION_LIMIT); documentBuilderFactory.setAttribute(SECURITY_MANAGER_PROPERTY, securityManager); DocumentBuilder docBuilder = documentBuilderFactory.newDocumentBuilder(); docBuilder.setEntityResolver(new CarbonEntityResolver()); ByteArrayInputStream is = new ByteArrayInputStream(decodedString.getBytes()); Document document = docBuilder.parse(is); Element element = document.getDocumentElement(); UnmarshallerFactory unmarshallerFactory = Configuration.getUnmarshallerFactory(); Unmarshaller unmarshaller = unmarshallerFactory.getUnmarshaller(element); return unmarshaller.unmarshall(element); } catch (ParserConfigurationException e) { throw new SSOAgentException("Error in unmarshalling SAML2SSO Request from the encoded String", e); } catch (UnmarshallingException e) { throw new SSOAgentException("Error in unmarshalling SAML2SSO Request from the encoded String", e); } catch (SAXException e) { throw new SSOAgentException("Error in unmarshalling SAML2SSO Request from the encoded 
String", e); } catch (IOException e) { throw new SSOAgentException("Error in unmarshalling SAML2SSO Request from the encoded String", e); } } private String decodeHTMLCharacters(String encodedStr) { return encodedStr.replaceAll("&amp;", "&").replaceAll("&lt;", "<").replaceAll("&gt;", ">") .replaceAll("&quot;", "\"").replaceAll("&apos;", "'"); } /* * Process the response and returns the results */ private Map<String, String> getAssertionStatements(Assertion assertion) { Map<String, String> results = new HashMap<String, String>(); if (assertion != null) { List<AttributeStatement> attributeStatementList = assertion.getAttributeStatements(); if (attributeStatementList != null) { for (AttributeStatement statement : attributeStatementList) { List<Attribute> attributesList = statement.getAttributes(); for (Attribute attribute : attributesList) { Element value = attribute.getAttributeValues().get(0).getDOM(); String attributeValue = value.getTextContent(); results.put(attribute.getName(), attributeValue); } } } } return results; } /** * Validate the AudienceRestriction of SAML2 Response * * @param assertion SAML2 Assertion * @return validity */ protected void validateAudienceRestriction(Assertion assertion) throws SSOAgentException { if (assertion != null) { Conditions conditions = assertion.getConditions(); if (conditions != null) { List<AudienceRestriction> audienceRestrictions = conditions.getAudienceRestrictions(); if (audienceRestrictions != null && !audienceRestrictions.isEmpty()) { boolean audienceFound = false; for (AudienceRestriction audienceRestriction : audienceRestrictions) { if (audienceRestriction.getAudiences() != null && audienceRestriction.getAudiences().size() > 0) { for (Audience audience : audienceRestriction.getAudiences()) { if (ssoAgentConfig.getSAML2().getSPEntityId().equals(audience.getAudienceURI())) { audienceFound = true; break; } } } if (audienceFound) { break; } } if (!audienceFound) { throw new SSOAgentException("SAML2 Assertion Audience 
Restriction validation failed"); } } else { throw new SSOAgentException("SAML2 Response doesn't contain AudienceRestrictions"); } } else { throw new SSOAgentException("SAML2 Response doesn't contain Conditions"); } } } /** * Validate the signature of a SAML2 Response and Assertion * * @param response SAML2 Response * @return true, if signature is valid. */ protected void validateSignature(Response response, Assertion assertion) throws SSOAgentException { if (signatureValidator != null) { //Custom implemetation of signature validation SAMLSignatureValidator signatureValidatorUtility = (SAMLSignatureValidator) signatureValidator; signatureValidatorUtility.validateSignature(response, assertion, ssoAgentConfig); } else { //If custom implementation not found, Execute the default implementation if (ssoAgentConfig.getSAML2().isResponseSigned()) { if (response.getSignature() == null) { throw new SSOAgentException("SAML2 Response signing is enabled, but signature element not found in SAML2 Response element"); } else { try { SignatureValidator validator = new SignatureValidator( new X509CredentialImpl(ssoAgentConfig.getSAML2().getSSOAgentX509Credential())); validator.validate(response.getSignature()); } catch (ValidationException e) { throw new SSOAgentException("Signature validation failed for SAML2 Response"); } } } if (ssoAgentConfig.getSAML2().isAssertionSigned()) { if (assertion.getSignature() == null) { throw new SSOAgentException("SAML2 Assertion signing is enabled, but signature element not found in SAML2 Assertion element"); } else { try { SignatureValidator validator = new SignatureValidator( new X509CredentialImpl(ssoAgentConfig.getSAML2().getSSOAgentX509Credential())); validator.validate(assertion.getSignature()); } catch (ValidationException e) { throw new SSOAgentException("Signature validation failed for SAML2 Assertion"); } } } } } /** * Serialize the Auth. Request * * @param xmlObject * @return serialized auth. 
req */ protected String marshall(XMLObject xmlObject) throws SSOAgentException { try { System.setProperty("javax.xml.parsers.DocumentBuilderFactory", "org.apache.xerces.jaxp.DocumentBuilderFactoryImpl"); MarshallerFactory marshallerFactory = org.opensaml.xml.Configuration.getMarshallerFactory(); Marshaller marshaller = marshallerFactory.getMarshaller(xmlObject); Element element = marshaller.marshall(xmlObject); ByteArrayOutputStream byteArrayOutputStrm = new ByteArrayOutputStream(); DOMImplementationRegistry registry = DOMImplementationRegistry.newInstance(); DOMImplementationLS impl = (DOMImplementationLS) registry.getDOMImplementation("LS"); LSSerializer writer = impl.createLSSerializer(); LSOutput output = impl.createLSOutput(); output.setByteStream(byteArrayOutputStrm); writer.write(element, output); return byteArrayOutputStrm.toString(); } catch (ClassNotFoundException e) { throw new SSOAgentException("Error in marshalling SAML2 Assertion", e); } catch (InstantiationException e) { throw new SSOAgentException("Error in marshalling SAML2 Assertion", e); } catch (MarshallingException e) { throw new SSOAgentException("Error in marshalling SAML2 Assertion", e); } catch (IllegalAccessException e) { throw new SSOAgentException("Error in marshalling SAML2 Assertion", e); } } /** * Get Decrypted Assertion * * @param encryptedAssertion * @return * @throws Exception */ protected Assertion getDecryptedAssertion(EncryptedAssertion encryptedAssertion) throws Exception { KeyInfoCredentialResolver keyResolver = new StaticKeyInfoCredentialResolver( new X509CredentialImpl(ssoAgentConfig.getSAML2().getSSOAgentX509Credential())); EncryptedKey key = encryptedAssertion.getEncryptedData(). getKeyInfo().getEncryptedKeys().get(0); Decrypter decrypter = new Decrypter(null, keyResolver, null); SecretKey dkey = (SecretKey) decrypter.decryptKey(key, encryptedAssertion.getEncryptedData(). 
getEncryptionMethod().getAlgorithm()); Credential shared = SecurityHelper.getSimpleCredential(dkey); decrypter = new Decrypter(new StaticKeyInfoCredentialResolver(shared), null, null); decrypter.setRootInNewDocument(true); return decrypter.decrypt(encryptedAssertion); } protected boolean isNoPassive(Response response) { return response.getStatus() != null && response.getStatus().getStatusCode() != null && response.getStatus().getStatusCode().getValue().equals(StatusCode.RESPONDER_URI) && response.getStatus().getStatusCode().getStatusCode() != null && response.getStatus().getStatusCode().getStatusCode().getValue().equals( StatusCode.NO_PASSIVE_URI); } public SSOAgentConfig getSsoAgentConfig() { return ssoAgentConfig; } }
/*
 * Copyright (c) 2002-2018 "Neo Technology,"
 * Network Engine for Objects in Lund AB [http://neotechnology.com]
 *
 * This file is part of Neo4j.
 *
 * Neo4j is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package org.neo4j.server.rest.security;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Servlet filter that enforces a set of {@link SecurityRule}s, keyed by wildcard URI
 * patterns. A request failing authorization gets a 401 challenge from the first failing
 * rule; a request any matching rule marks forbidden gets a 403 after all rules have run.
 */
public class SecurityFilter implements Filter
{
    private final HashMap<UriPathWildcardMatcher, HashSet<ForbiddingSecurityRule>> rules =
            new HashMap<UriPathWildcardMatcher, HashSet<ForbiddingSecurityRule>>();

    public SecurityFilter( SecurityRule rule, SecurityRule... rules )
    {
        this( merge( rule, rules ) );
    }

    public SecurityFilter( Iterable<SecurityRule> securityRules )
    {
        // For backwards compatibility: a rule path without a trailing "*" is treated
        // as a prefix and gets a wildcard appended before registration.
        for ( SecurityRule r : securityRules )
        {
            String rulePath = r.forUriPath();
            if ( !rulePath.endsWith( "*" ) )
            {
                rulePath = rulePath + "*";
            }

            UriPathWildcardMatcher uriPathWildcardMatcher = new UriPathWildcardMatcher( rulePath );
            HashSet<ForbiddingSecurityRule> ruleHashSet = rules.get( uriPathWildcardMatcher );
            if ( ruleHashSet == null )
            {
                ruleHashSet = new HashSet<ForbiddingSecurityRule>();
                rules.put( uriPathWildcardMatcher, ruleHashSet );
            }
            ruleHashSet.add( fromSecurityRule( r ) );
        }
    }

    // Adapts a plain SecurityRule to the ForbiddingSecurityRule interface
    // (never-forbidding) unless it already implements it.
    private static ForbiddingSecurityRule fromSecurityRule( final SecurityRule rule )
    {
        if ( rule instanceof ForbiddingSecurityRule )
        {
            return (ForbiddingSecurityRule) rule;
        }
        return new ForbiddenRuleDecorator( rule );
    }

    // Prepends the mandatory first rule to the varargs tail.
    private static Iterable<SecurityRule> merge( SecurityRule rule, SecurityRule[] rules )
    {
        ArrayList<SecurityRule> result = new ArrayList<SecurityRule>();
        result.add( rule );
        Collections.addAll( result, rules );
        return result;
    }

    @Override
    public void init( FilterConfig filterConfig ) throws ServletException
    {
    }

    /**
     * Applies every rule whose URI pattern matches the request path. Responds 401 on the
     * first unauthorized rule, 403 if any matching rule forbids the request, otherwise
     * passes the request down the chain.
     */
    @Override
    public void doFilter( ServletRequest request, ServletResponse response, FilterChain chain )
            throws IOException, ServletException
    {
        validateRequestType( request );
        validateResponseType( response );

        HttpServletRequest httpReq = (HttpServletRequest) request;
        String path = httpReq.getContextPath() + (httpReq.getPathInfo() == null ? "" : httpReq.getPathInfo());

        boolean requestIsForbidden = false;

        // Iterate over entries so each matching pattern costs a single map access
        // instead of a keySet() walk plus a get() per key.
        for ( Map.Entry<UriPathWildcardMatcher, HashSet<ForbiddingSecurityRule>> entry : rules.entrySet() )
        {
            if ( entry.getKey().matches( path ) )
            {
                for ( ForbiddingSecurityRule securityRule : entry.getValue() )
                {
                    // 401 on the first failed rule we come along
                    if ( !securityRule.isAuthorized( httpReq ) )
                    {
                        createUnauthorizedChallenge( response, securityRule );
                        return;
                    }
                    requestIsForbidden |= securityRule.isForbidden( httpReq );
                }
            }
        }

        if ( requestIsForbidden )
        {
            createForbiddenResponse( response );
            return;
        }

        chain.doFilter( request, response );
    }

    private void validateRequestType( ServletRequest request ) throws ServletException
    {
        if ( !(request instanceof HttpServletRequest) )
        {
            throw new ServletException( String.format( "Expected HttpServletRequest, received [%s]", request.getClass()
                    .getCanonicalName() ) );
        }
    }

    private void validateResponseType( ServletResponse response ) throws ServletException
    {
        if ( !(response instanceof HttpServletResponse) )
        {
            throw new ServletException( String.format( "Expected HttpServletResponse, received [%s]",
                    response.getClass()
                            .getCanonicalName() ) );
        }
    }

    // Sends a 401 along with the rule's WWW-Authenticate challenge header.
    private void createUnauthorizedChallenge( ServletResponse response, SecurityRule rule )
    {
        HttpServletResponse httpServletResponse = (HttpServletResponse) response;
        httpServletResponse.setStatus( HttpServletResponse.SC_UNAUTHORIZED );
        httpServletResponse.addHeader( "WWW-Authenticate", rule.wwwAuthenticateHeader() );
    }

    private void createForbiddenResponse( ServletResponse response )
    {
        HttpServletResponse httpServletResponse = (HttpServletResponse) response;
        httpServletResponse.setStatus( HttpServletResponse.SC_FORBIDDEN );
    }

    @Override
    public synchronized void destroy()
    {
        rules.clear();
    }

    /** Builds a Basic-auth {@code WWW-Authenticate} header value for the given realm. */
    public static String basicAuthenticationResponse( String realm )
    {
        return "Basic realm=\"" + realm + "\"";
    }

    // Wraps a SecurityRule so it participates in forbidden-checking without ever
    // forbidding anything; all other behavior delegates to the wrapped rule.
    private static class ForbiddenRuleDecorator implements ForbiddingSecurityRule
    {
        private final SecurityRule innerRule;

        public ForbiddenRuleDecorator( SecurityRule rule )
        {
            this.innerRule = rule;
        }

        @Override
        public boolean isForbidden( HttpServletRequest request )
        {
            return false;
        }

        @Override
        public boolean isAuthorized( HttpServletRequest request )
        {
            return innerRule.isAuthorized( request );
        }

        @Override
        public String forUriPath()
        {
            return innerRule.forUriPath();
        }

        @Override
        public String wwwAuthenticateHeader()
        {
            return innerRule.wwwAuthenticateHeader();
        }
    }
}
/**
 * Copyright 2014 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package rx.internal.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;

import org.junit.Test;

import rx.Scheduler;
import rx.Subscription;
import rx.functions.Action0;
import rx.functions.Function;
import rx.schedulers.Schedulers;

// Unit tests for IndexedRingBuffer: add/remove semantics, forEach traversal
// (including starting offsets and early exit), slot reuse, and concurrent use.
public class IndexedRingBufferTest {

    // Two adds are both visited by forEach.
    @Test
    public void add() {
        @SuppressWarnings("unchecked")
        IndexedRingBuffer<LSubscription> list = IndexedRingBuffer.getInstance();
        list.add(new LSubscription(1));
        list.add(new LSubscription(2));
        final AtomicInteger c = new AtomicInteger();
        list.forEach(newCounterAction(c));
        assertEquals(2, c.get());
    }

    // Removing the last-added element leaves only the first visible.
    @Test
    public void removeEnd() {
        @SuppressWarnings("unchecked")
        IndexedRingBuffer<LSubscription> list = IndexedRingBuffer.getInstance();
        list.add(new LSubscription(1));
        int n2 = list.add(new LSubscription(2));

        final AtomicInteger c = new AtomicInteger();
        list.forEach(newCounterAction(c));
        assertEquals(2, c.get());

        list.remove(n2);

        final AtomicInteger c2 = new AtomicInteger();
        list.forEach(newCounterAction(c2));
        assertEquals(1, c2.get());
    }

    // Removing an element between two others skips it during traversal.
    @Test
    public void removeMiddle() {
        @SuppressWarnings("unchecked")
        IndexedRingBuffer<LSubscription> list = IndexedRingBuffer.getInstance();
        list.add(new LSubscription(1));
        int n2 = list.add(new LSubscription(2));
        list.add(new LSubscription(3));
        list.remove(n2);

        final AtomicInteger c = new AtomicInteger();
        list.forEach(newCounterAction(c));
        assertEquals(2, c.get());
    }

    // A freed slot is reused by the next add, preserving traversal position.
    @Test
    public void addRemoveAdd() {
        @SuppressWarnings("unchecked")
        IndexedRingBuffer<String> list = IndexedRingBuffer.getInstance();
        list.add("one");
        list.add("two");
        list.add("three");

        ArrayList<String> values = new ArrayList<String>();
        list.forEach(accumulate(values));
        assertEquals(3, values.size());
        assertEquals("one", values.get(0));
        assertEquals("two", values.get(1));
        assertEquals("three", values.get(2));

        list.remove(1);

        values.clear();
        list.forEach(accumulate(values));
        assertEquals(2, values.size());
        assertEquals("one", values.get(0));
        assertEquals("three", values.get(1));

        // "four" should land in the slot freed by removing "two".
        list.add("four");

        values.clear();
        list.forEach(accumulate(values));
        assertEquals(3, values.size());
        assertEquals("one", values.get(0));
        assertEquals("four", values.get(1));
        assertEquals("three", values.get(2));

        final AtomicInteger c = new AtomicInteger();
        list.forEach(newCounterAction(c));
        assertEquals(3, c.get());
    }

    // Large element counts spanning multiple internal sections.
    @Test
    public void addThousands() {
        String s = "s";
        @SuppressWarnings("unchecked")
        IndexedRingBuffer<String> list = IndexedRingBuffer.getInstance();
        for (int i = 0; i < 10000; i++) {
            list.add(s);
        }
        AtomicInteger c = new AtomicInteger();
        list.forEach(newCounterAction(c));
        assertEquals(10000, c.get());

        list.remove(5000);
        c.set(0);
        list.forEach(newCounterAction(c));
        assertEquals(9999, c.get());

        list.add("one");
        list.add("two");
        c.set(0);
        // list.forEach(print());
        list.forEach(newCounterAction(c));
        assertEquals(10001, c.get());
    }

    // forEach with a starting offset rotates the traversal order and returns the
    // index at which the next traversal should resume.
    @Test
    public void testForEachWithIndex() {
        @SuppressWarnings("unchecked")
        IndexedRingBuffer<String> buffer = IndexedRingBuffer.getInstance();
        buffer.add("zero");
        buffer.add("one");
        buffer.add("two");
        buffer.add("three");

        final ArrayList<String> list = new ArrayList<String>();
        int nextIndex = buffer.forEach(accumulate(list));
        assertEquals(4, list.size());
        assertEquals(list, Arrays.asList("zero", "one", "two", "three"));
        assertEquals(0, nextIndex);

        list.clear();
        nextIndex = buffer.forEach(accumulate(list), 0);
        assertEquals(4, list.size());
        assertEquals(list, Arrays.asList("zero", "one", "two", "three"));
        assertEquals(0, nextIndex);

        list.clear();
        nextIndex = buffer.forEach(accumulate(list), 2);
        assertEquals(4, list.size());
        assertEquals(list, Arrays.asList("two", "three", "zero", "one"));
        assertEquals(2, nextIndex);
        // 2, 3, 0, 1

        list.clear();
        nextIndex = buffer.forEach(accumulate(list), 3);
        assertEquals(4, list.size());
        assertEquals(list, Arrays.asList("three", "zero", "one", "two"));
        assertEquals(3, nextIndex);
        // 3, 0, 1, 2

        list.clear();
        nextIndex = buffer.forEach(new Function<String, Boolean>() {

            @Override
            public Boolean call(String t1) {
                list.add(t1);
                // returning false stops the traversal after the first element
                return false;
            }

        }, 3);
        assertEquals(1, list.size());
        assertEquals(list, Arrays.asList("three"));
        assertEquals(3, nextIndex);
        // we ended early so we'll go back to this index again next time

        list.clear();
        nextIndex = buffer.forEach(new Function<String, Boolean>() {
            int i = 0;

            @Override
            public Boolean call(String t1) {
                list.add(t1);
                // stop after visiting three elements
                if (i++ == 2) {
                    return false;
                } else {
                    return true;
                }
            }

        }, 0);
        assertEquals(3, list.size());
        assertEquals(list, Arrays.asList("zero", "one", "two"));
        assertEquals(2, nextIndex);
        // 0, 1, 2 (// we ended early so we'll go back to the last index again next time)
    }

    // A mid-buffer starting offset still visits every element exactly once,
    // wrapping across internal section boundaries.
    @Test
    public void testForEachAcrossSections() {
        @SuppressWarnings("unchecked")
        IndexedRingBuffer<Integer> buffer = IndexedRingBuffer.getInstance();
        for (int i = 0; i < 10000; i++) {
            buffer.add(i);
        }

        final ArrayList<Integer> list = new ArrayList<Integer>();
        int nextIndex = buffer.forEach(accumulate(list), 5000);
        assertEquals(10000, list.size());
        assertEquals(Integer.valueOf(5000), list.get(0));
        assertEquals(Integer.valueOf(9999), list.get(4999));
        assertEquals(Integer.valueOf(0), list.get(5000));
        assertEquals(Integer.valueOf(4999), list.get(9999));
        assertEquals(5000, nextIndex);
    }

    // Sequential add/remove cycles must reuse slots rather than grow the buffer.
    @Test
    public void longRunningAddRemoveAddDoesntLeakMemory() {
        String s = "s";
        @SuppressWarnings("unchecked")
        IndexedRingBuffer<String> list = IndexedRingBuffer.getInstance();
        for (int i = 0; i < 20000; i++) {
            int index = list.add(s);
            list.remove(index);
        }

        AtomicInteger c = new AtomicInteger();
        list.forEach(newCounterAction(c));
        assertEquals(0, c.get());
        // System.out.println("Index is: " + list.index.get() + " when it should be no bigger than " + list.SIZE);
        assertTrue(list.index.get() < list.SIZE);
        // it should actually be 1 since we only did add/remove sequentially
        assertEquals(1, list.index.get());
    }

    // Two workers adding concurrently must not lose or duplicate elements.
    @Test
    public void testConcurrentAdds() throws InterruptedException {
        @SuppressWarnings("unchecked")
        final IndexedRingBuffer<Integer> list = IndexedRingBuffer.getInstance();

        Scheduler.Worker w1 = Schedulers.computation().createWorker();
        Scheduler.Worker w2 = Schedulers.computation().createWorker();

        final CountDownLatch latch = new CountDownLatch(2);

        w1.schedule(new Action0() {

            @Override
            public void call() {
                for (int i = 0; i < 10000; i++) {
                    list.add(i);
                }
                latch.countDown();
            }

        });
        w2.schedule(new Action0() {

            @Override
            public void call() {
                for (int i = 10000; i < 20000; i++) {
                    list.add(i);
                }
                latch.countDown();
            }

        });

        latch.await();

        w1.unsubscribe();
        w2.unsubscribe();

        AtomicInteger c = new AtomicInteger();
        list.forEach(newCounterAction(c));
        assertEquals(20000, c.get());

        // Every value 0..19999 must be present exactly once.
        ArrayList<Integer> values = new ArrayList<Integer>();
        list.forEach(accumulate(values));
        Collections.sort(values);
        int j = 0;
        for (int i : values) {
            assertEquals(i, j++);
        }
    }

    // Interleaved add/remove from two workers: final contents must still be the
    // full 0..19999 range with no nulls returned from remove.
    @Test
    public void testConcurrentAddAndRemoves() throws InterruptedException {
        @SuppressWarnings("unchecked")
        final IndexedRingBuffer<Integer> list = IndexedRingBuffer.getInstance();

        final List<Exception> exceptions = Collections.synchronizedList(new ArrayList<Exception>());

        Scheduler.Worker w1 = Schedulers.computation().createWorker();
        Scheduler.Worker w2 = Schedulers.computation().createWorker();

        final CountDownLatch latch = new CountDownLatch(2);

        w1.schedule(new Action0() {

            @Override
            public void call() {
                try {
                    for (int i = 10000; i < 20000; i++) {
                        list.add(i);
                        // Integer v = list.remove(index);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                    exceptions.add(e);
                }
                latch.countDown();
            }

        });
        w2.schedule(new Action0() {

            @Override
            public void call() {
                try {
                    for (int i = 0; i < 10000; i++) {
                        int index = list.add(i);
                        // cause some random remove/add interference
                        Integer v = list.remove(index);
                        if (v == null) {
                            throw new RuntimeException("should not get null");
                        }
                        list.add(v);
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                    exceptions.add(e);
                }
                latch.countDown();
            }

        });

        latch.await();

        w1.unsubscribe();
        w2.unsubscribe();

        AtomicInteger c = new AtomicInteger();
        list.forEach(newCounterAction(c));
        assertEquals(20000, c.get());

        ArrayList<Integer> values = new ArrayList<Integer>();
        list.forEach(accumulate(values));
        Collections.sort(values);
        int j = 0;
        for (int i : values) {
            assertEquals(i, j++);
        }

        if (exceptions.size() > 0) {
            System.out.println("Exceptions: " + exceptions);
        }
        assertEquals(0, exceptions.size());
    }

    // forEach callback that appends each visited element to the given list.
    private <T> Function<T, Boolean> accumulate(final ArrayList<T> list) {
        return new Function<T, Boolean>() {

            @Override
            public Boolean call(T t1) {
                list.add(t1);
                return true;
            }

        };
    }

    // Debug helper: prints each visited element.
    @SuppressWarnings("unused")
    private Function<Object, Boolean> print() {
        return new Function<Object, Boolean>() {

            @Override
            public Boolean call(Object t1) {
                System.out.println("Object: " + t1);
                return true;
            }

        };
    }

    // forEach callback that counts the visited elements.
    private Function<Object, Boolean> newCounterAction(final AtomicInteger c) {
        return new Function<Object, Boolean>() {

            @Override
            public Boolean call(Object t1) {
                c.incrementAndGet();
                return true;
            }

        };
    }

    // Minimal Subscription implementation used as buffer payload in the tests.
    public static class LSubscription implements Subscription {

        private final int n;

        public LSubscription(int n) {
            this.n = n;
        }

        @Override
        public void unsubscribe() {

        }

        @Override
        public boolean isUnsubscribed() {
            return false;
        }

        @Override
        public String toString() {
            return "Subscription=>" + n;
        }
    }
}
// Copyright 2017 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.android; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.actions.ActionAnalysisMetadata; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException; import com.google.devtools.build.lib.rules.android.AndroidResourcesTest.WithPlatforms; import com.google.devtools.build.lib.rules.android.AndroidResourcesTest.WithoutPlatforms; import com.google.devtools.build.lib.rules.android.databinding.DataBinding; import com.google.devtools.build.lib.rules.android.databinding.DataBindingContext; import com.google.devtools.build.lib.vfs.PathFragment; import java.util.Optional; import java.util.Set; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; /** Tests {@link AndroidResources} */ 
@RunWith(Suite.class)
@SuiteClasses({WithoutPlatforms.class, WithPlatforms.class})
public abstract class AndroidResourcesTest extends ResourceTestBase {
  /** Use legacy toolchain resolution. */
  @RunWith(JUnit4.class)
  public static class WithoutPlatforms extends AndroidResourcesTest {}

  /** Use platform-based toolchain resolution. */
  @RunWith(JUnit4.class)
  public static class WithPlatforms extends AndroidResourcesTest {
    @Override
    protected boolean platformBasedToolchains() {
      return true;
    }
  }

  // Canonical resource root shared by tests that expect a single root.
  private static final PathFragment DEFAULT_RESOURCE_ROOT = PathFragment.create(RESOURCE_ROOT);
  private static final ImmutableList<PathFragment> RESOURCES_ROOTS =
      ImmutableList.of(DEFAULT_RESOURCE_ROOT);

  @Before
  public void setupCcToolchain() throws Exception {
    getAnalysisMock().ccSupport().setupCcToolchainConfigForCpu(mockToolsConfig, "armeabi-v7a");
  }

  // NOTE(review): this method carries BOTH @Before and @Test, so it also runs as a
  // setup step before every other test — the @Before looks unintended; verify.
  @Before
  @Test
  public void testGetResourceRootsNoResources() throws Exception {
    assertThat(getResourceRoots()).isEmpty();
  }

  // A file outside the <qualifier>/<file> directory layout must be rejected.
  @Test
  public void testGetResourceRootsInvalidResourceDirectory() throws Exception {
    try {
      getResourceRoots("is-this-drawable-or-values/foo.xml");
      assertWithMessage("Expected exception not thrown!").fail();
    } catch (RuleErrorException e) {
      // expected
    }

    errorConsumer.assertAttributeError(
        "resource_files", "is not in the expected resource directory structure");
  }

  // Resources under different parent directories must be rejected.
  @Test
  public void testGetResourceRootsMultipleRoots() throws Exception {
    try {
      getResourceRoots("subdir/values/foo.xml", "otherdir/values/bar.xml");
      assertWithMessage("Expected exception not thrown!").fail();
    } catch (RuleErrorException e) {
      // expected
    }

    errorConsumer.assertAttributeError(
        "resource_files", "All resources must share a common directory");
  }

  @Test
  public void testGetResourceRoots() throws Exception {
    assertThat(getResourceRoots("values-hdpi/foo.xml", "values-mdpi/bar.xml"))
        .isEqualTo(RESOURCES_ROOTS);
  }

  @Test
  public void testGetResourceRootsCommonSubdirectory() throws Exception {
    assertThat(getResourceRoots("subdir/values-hdpi/foo.xml", "subdir/values-mdpi/bar.xml"))
        .containsExactly(DEFAULT_RESOURCE_ROOT.getRelative("subdir"));
  }

  private ImmutableList<PathFragment> getResourceRoots(String... pathResourceStrings)
      throws Exception {
    return getResourceRoots(getResources(pathResourceStrings));
  }

  private ImmutableList<PathFragment> getResourceRoots(ImmutableList<Artifact> artifacts)
      throws Exception {
    return AndroidResources.getResourceRoots(errorConsumer, artifacts, "resource_files");
  }

  @Test
  public void testFilterEmpty() throws Exception {
    assertFilter(ImmutableList.of(), ImmutableList.of());
  }

  @Test
  public void testFilterNoop() throws Exception {
    ImmutableList<Artifact> resources = getResources("values-en/foo.xml", "values-es/bar.xml");
    assertFilter(resources, resources);
  }

  @Test
  public void testFilterToEmpty() throws Exception {
    assertFilter(getResources("values-en/foo.xml", "values-es/bar.xml"), ImmutableList.of());
  }

  @Test
  public void testPartiallyFilter() throws Exception {
    Artifact keptResource = getResource("values-en/foo.xml");
    assertFilter(
        ImmutableList.of(keptResource, getResource("values-es/bar.xml")),
        ImmutableList.of(keptResource));
  }

  @Test
  public void testFilterIsDependency() throws Exception {
    Artifact keptResource = getResource("values-en/foo.xml");
    assertFilter(
        ImmutableList.of(keptResource, getResource("drawable/bar.png")),
        ImmutableList.of(keptResource),
        /* isDependency = */ true);
  }

  @Test
  public void testFilterValidatedNoop() throws Exception {
    ImmutableList<Artifact> resources = getResources("values-en/foo.xml", "values-es/bar.xml");
    assertFilterValidated(resources, resources);
  }

  @Test
  public void testFilterValidated() throws Exception {
    Artifact keptResource = getResource("values-en/foo.xml");
    assertFilterValidated(
        ImmutableList.of(keptResource, getResource("drawable/bar.png")),
        ImmutableList.of(keptResource));
  }

  // Runs the full process() pipeline, filters the result, and checks that
  // validation-related outputs survive filtering unchanged.
  private void assertFilterValidated(
      ImmutableList<Artifact> unfilteredResources, ImmutableList<Artifact> filteredResources)
      throws Exception {
    RuleContext ruleContext = getRuleContext();
    final AndroidDataContext dataContext = AndroidDataContext.forNative(ruleContext);
    ValidatedAndroidResources unfiltered =
        new AndroidResources(unfilteredResources, getResourceRoots(unfilteredResources))
            .process(
                ruleContext,
                dataContext,
                getManifest(),
                DataBinding.contextFrom(ruleContext, dataContext.getAndroidConfig()),
                /* neverlink = */ false);
    Optional<? extends AndroidResources> maybeFiltered =
        assertFilter(unfiltered, filteredResources, /* isDependency = */ true);

    if (maybeFiltered.isPresent()) {
      AndroidResources filtered = maybeFiltered.get();
      assertThat(filtered instanceof ValidatedAndroidResources).isTrue();
      ValidatedAndroidResources validated = (ValidatedAndroidResources) filtered;

      // Validate fields related to validation are unchanged
      assertThat(validated.getRTxt()).isEqualTo(unfiltered.getRTxt());
      assertThat(validated.getAapt2RTxt()).isEqualTo(unfiltered.getAapt2RTxt());
    }
  }

  private void assertFilter(
      ImmutableList<Artifact> unfilteredResources, ImmutableList<Artifact> filteredResources)
      throws Exception {
    assertFilter(unfilteredResources, filteredResources, /* isDependency = */ false);
  }

  private void assertFilter(
      ImmutableList<Artifact> unfilteredResources,
      ImmutableList<Artifact> filteredResources,
      boolean isDependency)
      throws Exception {
    AndroidResources unfiltered =
        new AndroidResources(unfilteredResources, getResourceRoots(unfilteredResources));
    assertFilter(unfiltered, filteredResources, isDependency);
  }

  // Core filter assertion: checks no-op detection, filtered contents/roots, and
  // (for dependencies) that removed artifacts are reported to the filter.
  private Optional<? extends AndroidResources> assertFilter(
      AndroidResources unfiltered, ImmutableList<Artifact> filteredResources, boolean isDependency)
      throws Exception {
    ImmutableList.Builder<Artifact> filteredDepsBuilder = ImmutableList.builder();

    ResourceFilter fakeFilter =
        ResourceFilter.of(ImmutableSet.copyOf(filteredResources), filteredDepsBuilder::add);

    Optional<? extends AndroidResources> filtered =
        unfiltered.maybeFilter(errorConsumer, fakeFilter, isDependency);

    if (filteredResources.equals(unfiltered.getResources())) {
      // We expect filtering to have been a no-op
      assertThat(filtered.isPresent()).isFalse();
    } else {
      // The resources and their roots should be filtered
      assertThat(filtered.get().getResources())
          .containsExactlyElementsIn(filteredResources)
          .inOrder();
      assertThat(filtered.get().getResourceRoots())
          .containsExactlyElementsIn(getResourceRoots(filteredResources))
          .inOrder();
    }

    if (!isDependency) {
      // The filter should not record any filtered dependencies
      assertThat(filteredDepsBuilder.build()).isEmpty();
    } else {
      // The filtered dependencies should be exactly the list of filtered resources
      assertThat(unfiltered.getResources())
          .containsExactlyElementsIn(
              Iterables.concat(filteredDepsBuilder.build(), filteredResources));
    }

    return filtered;
  }

  @Test
  public void testParseAndCompile() throws Exception {
    RuleContext ruleContext = getRuleContext();
    ParsedAndroidResources parsed = assertParse(ruleContext);

    assertThat(parsed.getCompiledSymbols()).isNotNull();

    // Since there was no data binding, the compile action should just take in resources and output
    // compiled symbols.
    assertActionArtifacts(
        ruleContext,
        /* inputs = */ parsed.getResources(),
        /* outputs = */ ImmutableList.of(parsed.getCompiledSymbols()));
  }

  @Test
  public void testParseWithDataBinding() throws Exception {
    RuleContext ruleContext = getRuleContextWithDataBinding();

    ParsedAndroidResources parsed = assertParse(ruleContext);

    // The compile action should take in resources and manifest in and output compiled symbols and
    // a databinding zip.
    assertActionArtifacts(
        ruleContext,
        /* inputs = */ ImmutableList.<Artifact>builder()
            .addAll(parsed.getResources())
            .add(parsed.getManifest())
            .build(),
        /* outputs = */ ImmutableList.of(
            parsed.getCompiledSymbols(), DataBinding.getLayoutInfoFile(ruleContext)));
  }

  @Test
  public void testMergeCompiled() throws Exception {
    RuleContext ruleContext = getRuleContext();
    ParsedAndroidResources parsed = assertParse(ruleContext);
    MergedAndroidResources merged =
        parsed.merge(
            AndroidDataContext.forNative(ruleContext),
            ResourceDependencies.fromRuleDeps(ruleContext, /* neverlink = */ false));

    // Besides processed manifest, inherited values should be equal
    assertThat(parsed).isEqualTo(new ParsedAndroidResources(merged, parsed.getStampedManifest()));

    // There should be a new processed manifest
    assertThat(merged.getManifest()).isNotEqualTo(parsed.getManifest());

    assertThat(merged.getDataBindingInfoZip()).isNull();
    assertThat(merged.getCompiledSymbols()).isNotNull();

    // We use the compiled symbols file to build the resource class jar
    assertActionArtifacts(
        ruleContext,
        /*inputs=*/ ImmutableList.of(merged.getCompiledSymbols(), parsed.getManifest()),
        /*outputs=*/ ImmutableList.of(merged.getClassJar(), merged.getManifest()));
  }

  @Test
  public void testValidateAapt2() throws Exception {
    RuleContext ruleContext = getRuleContext();
    MergedAndroidResources merged = makeMergedResources(ruleContext);
    ValidatedAndroidResources validated =
        merged.validate(AndroidDataContext.forNative(ruleContext));

    // Inherited values should be equal
    assertThat(merged).isEqualTo(new MergedAndroidResources(validated));

    // aapt artifacts should be generated
    assertActionArtifacts(
        ruleContext,
        /* inputs = */ ImmutableList.of(validated.getCompiledSymbols(), validated.getManifest()),
        /* outputs = */ ImmutableList.of(
            validated.getRTxt(), validated.getJavaSourceJar(), validated.getApk()));

    // aapt2 artifacts should be recorded
    assertThat(validated.getCompiledSymbols()).isNotNull();
    assertThat(validated.getAapt2RTxt()).isNotNull();
    assertThat(validated.getAapt2SourceJar()).isNotNull();
    assertThat(validated.getStaticLibrary()).isNotNull();

    // Compile the resources into compiled symbols files
    assertActionArtifacts(
        ruleContext,
        /* inputs = */ validated.getResources(),
        /* outputs = */ ImmutableList.of(validated.getCompiledSymbols()));

    // Use the compiled symbols and manifest to build aapt2 packaging outputs
    assertActionArtifacts(
        ruleContext,
        /* inputs = */ ImmutableList.of(validated.getCompiledSymbols(), validated.getManifest()),
        /* outputs = */ ImmutableList.of(
            validated.getAapt2RTxt(), validated.getAapt2SourceJar(), validated.getStaticLibrary()));
  }

  @Test
  public void testValidate() throws Exception {
    RuleContext ruleContext = getRuleContext();
    makeMergedResources(ruleContext).validate(AndroidDataContext.forNative(ruleContext));

    Set<String> actionMnemonics =
        ruleContext.getAnalysisEnvironment().getRegisteredActions().stream()
            .map(ActionAnalysisMetadata::getMnemonic)
            .collect(ImmutableSet.toImmutableSet());

    // These are unfortunately the mnemonics used in Bazel; these should be changed once aapt1 is
    // removed.
    assertThat(actionMnemonics).contains("AndroidResourceLink"); // aapt2 validation
    assertThat(actionMnemonics).doesNotContain("AndroidResourceValidator"); // aapt1 validation
  }

  @Test
  public void testGenerateRClass() throws Exception {
    RuleContext ruleContext = getRuleContext();
    Artifact rTxt = ruleContext.getImplicitOutputArtifact(AndroidRuleClasses.ANDROID_R_TXT);
    ProcessedAndroidManifest manifest = getManifest();

    ProcessedAndroidData processedData =
        ProcessedAndroidData.of(
            makeParsedResources(ruleContext),
            AndroidAssets.from(ruleContext)
                .process(AndroidDataContext.forNative(ruleContext), AssetDependencies.empty()),
            manifest,
            rTxt,
            ruleContext.getImplicitOutputArtifact(AndroidRuleClasses.ANDROID_JAVA_SOURCE_JAR),
            ruleContext.getImplicitOutputArtifact(AndroidRuleClasses.ANDROID_RESOURCES_APK),
            /* dataBindingInfoZip = */ null,
            ResourceDependencies.fromRuleDeps(ruleContext, /* neverlink = */ false),
            null,
            null);

    ValidatedAndroidResources validated =
        processedData
            .generateRClass(AndroidDataContext.forNative(ruleContext))
            .getValidatedResources();

    // An action to generate the R.class file should be registered.
    assertActionArtifacts(
        ruleContext,
        /* inputs = */ ImmutableList.of(rTxt, manifest.getManifest()),
        /* outputs = */ ImmutableList.of(validated.getJavaClassJar()));
  }

  @Test
  public void testProcessBinaryDataGeneratesProguardOutput() throws Exception {
    RuleContext ruleContext =
        getRuleContext("android_binary", "manifest='AndroidManifest.xml',");
    AndroidDataContext dataContext = AndroidDataContext.forNative(ruleContext);

    ResourceApk resourceApk =
        ProcessedAndroidData.processBinaryDataFrom(
                dataContext,
                ruleContext,
                getManifest(),
                false,
                ImmutableMap.of(),
                AndroidResources.empty(),
                AndroidAssets.empty(),
                ResourceDependencies.empty(),
                AssetDependencies.empty(),
                ResourceFilterFactory.empty(),
                ImmutableList.of(),
                false,
                DataBinding.contextFrom(ruleContext, dataContext.getAndroidConfig()))
            .generateRClass(dataContext);
    assertThat(resourceApk.getResourceProguardConfig()).isNotNull();
    assertThat(resourceApk.getMainDexProguardConfig()).isNotNull();
  }

  /**
   * Validates that a parse action was invoked correctly. Returns the {@link ParsedAndroidResources}
   * for further validation.
*/ private ParsedAndroidResources assertParse(RuleContext ruleContext) throws Exception { return assertParse( ruleContext, DataBinding.contextFrom( ruleContext, ruleContext.getConfiguration().getFragment(AndroidConfiguration.class))); } private ParsedAndroidResources assertParse( RuleContext ruleContext, DataBindingContext dataBindingContext) throws Exception { ImmutableList<Artifact> resources = getResources("values-en/foo.xml", "drawable-hdpi/bar.png"); AndroidResources raw = new AndroidResources( resources, AndroidResources.getResourceRoots(ruleContext, resources, "resource_files")); StampedAndroidManifest manifest = getManifest(); ParsedAndroidResources parsed = raw.parse( AndroidDataContext.forNative(ruleContext), manifest, dataBindingContext); // Inherited values should be equal assertThat(raw).isEqualTo(new AndroidResources(parsed)); // Label should be set from RuleContext assertThat(parsed.getLabel()).isEqualTo(ruleContext.getLabel()); return parsed; } private MergedAndroidResources makeMergedResources(RuleContext ruleContext) throws RuleErrorException, InterruptedException { return makeParsedResources(ruleContext) .merge( AndroidDataContext.forNative(ruleContext), ResourceDependencies.fromRuleDeps(ruleContext, /* neverlink = */ false)); } private ParsedAndroidResources makeParsedResources(RuleContext ruleContext) throws RuleErrorException, InterruptedException { DataBindingContext dataBindingContext = DataBinding.contextFrom( ruleContext, ruleContext.getConfiguration().getFragment(AndroidConfiguration.class)); return makeParsedResources(ruleContext, dataBindingContext); } private ParsedAndroidResources makeParsedResources( RuleContext ruleContext, DataBindingContext dataBindingContext) throws RuleErrorException, InterruptedException { ImmutableList<Artifact> resources = getResources("values-en/foo.xml", "drawable-hdpi/bar.png"); return new AndroidResources( resources, AndroidResources.getResourceRoots(ruleContext, resources, "resource_files")) .parse( 
AndroidDataContext.forNative(ruleContext), getManifest(), dataBindingContext); } private ProcessedAndroidManifest getManifest() { return new ProcessedAndroidManifest( getResource("some/path/AndroidManifest.xml"), "some.java.pkg", /* exported = */ true); } /** Gets a dummy rule context object by creating a dummy target. */ private RuleContext getRuleContext() throws Exception { return getRuleContext("android_library"); } private RuleContext getRuleContextWithDataBinding() throws Exception { return getRuleContext("android_library", "enable_data_binding = 1"); } /** Gets a dummy rule context object by creating a dummy target. */ private RuleContext getRuleContext(String kind, String... additionalLines) throws Exception { ConfiguredTarget target = scratchConfiguredTarget( "java/foo", "target", ImmutableList.<String>builder() .add(kind + "(name = 'target',") .add(additionalLines) .add(")") .build() .toArray(new String[0])); return getRuleContextForActionTesting(target); } }
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.server;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.metamx.emitter.EmittingLogger;
import com.metamx.emitter.service.ServiceEmitter;
import io.druid.client.selector.Server;
import io.druid.guice.annotations.Json;
import io.druid.guice.annotations.Smile;
import io.druid.guice.http.DruidHttpClientConfig;
import io.druid.java.util.common.DateTimes;
import io.druid.java.util.common.IAE;
import io.druid.query.DruidMetrics;
import io.druid.query.GenericQueryMetricsFactory;
import io.druid.query.Query;
import io.druid.query.QueryMetrics;
import io.druid.query.QueryToolChestWarehouse;
import io.druid.server.log.RequestLogger;
import io.druid.server.metrics.QueryCountStatsProvider;
import io.druid.server.router.QueryHostFinder;
import io.druid.server.router.Router;
import io.druid.server.security.AuthConfig;
import io.druid.server.security.Escalator;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.client.api.Response;
import org.eclipse.jetty.client.api.Result;
import org.eclipse.jetty.client.util.BytesContentProvider;
import org.eclipse.jetty.http.HttpMethod;
import org.eclipse.jetty.proxy.AsyncProxyServlet;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.MediaType;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

/**
 * This class does async query processing and should be merged with QueryResource at some point.
 *
 * Router-side proxy servlet: it inspects incoming requests (Avatica, query cancellation,
 * query POSTs), picks the target broker via {@link QueryHostFinder}, stores routing decisions
 * as request attributes, and delegates the actual proxying to Jetty's {@link AsyncProxyServlet}.
 */
public class AsyncQueryForwardingServlet extends AsyncProxyServlet implements QueryCountStatsProvider
{
  private static final EmittingLogger log = new EmittingLogger(AsyncQueryForwardingServlet.class);
  @Deprecated // use SmileMediaTypes.APPLICATION_JACKSON_SMILE
  private static final String APPLICATION_SMILE = "application/smile";

  // Request attributes used to pass routing decisions from service() to the proxy callbacks.
  private static final String HOST_ATTRIBUTE = "io.druid.proxy.to.host";
  private static final String SCHEME_ATTRIBUTE = "io.druid.proxy.to.host.scheme";
  private static final String QUERY_ATTRIBUTE = "io.druid.proxy.query";
  private static final String AVATICA_QUERY_ATTRIBUTE = "io.druid.proxy.avaticaQuery";
  private static final String OBJECTMAPPER_ATTRIBUTE = "io.druid.proxy.objectMapper";

  private static final int CANCELLATION_TIMEOUT_MILLIS = 500;

  private final AtomicLong successfulQueryCount = new AtomicLong();
  private final AtomicLong failedQueryCount = new AtomicLong();
  private final AtomicLong interruptedQueryCount = new AtomicLong();

  /**
   * Writes a 500 response with a JSON {"error": message} body, unless the response has already
   * been committed (in which case only the buffer is flushed).
   */
  private static void handleException(HttpServletResponse response, ObjectMapper objectMapper, Exception exception)
      throws IOException
  {
    if (!response.isCommitted()) {
      final String errorMessage = exception.getMessage() == null ? "null exception" : exception.getMessage();

      response.resetBuffer();
      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
      objectMapper.writeValue(
          response.getOutputStream(),
          ImmutableMap.of("error", errorMessage)
      );
    }
    response.flushBuffer();
  }

  private final QueryToolChestWarehouse warehouse;
  private final ObjectMapper jsonMapper;
  private final ObjectMapper smileMapper;
  private final QueryHostFinder hostFinder;
  private final Provider<HttpClient> httpClientProvider;
  private final DruidHttpClientConfig httpClientConfig;
  private final ServiceEmitter emitter;
  private final RequestLogger requestLogger;
  private final GenericQueryMetricsFactory queryMetricsFactory;
  private final Escalator escalator;

  // Secondary client used only for broadcasting query cancellations to non-default brokers.
  private HttpClient broadcastClient;

  @Inject
  public AsyncQueryForwardingServlet(
      QueryToolChestWarehouse warehouse,
      @Json ObjectMapper jsonMapper,
      @Smile ObjectMapper smileMapper,
      QueryHostFinder hostFinder,
      @Router Provider<HttpClient> httpClientProvider,
      @Router DruidHttpClientConfig httpClientConfig,
      ServiceEmitter emitter,
      RequestLogger requestLogger,
      GenericQueryMetricsFactory queryMetricsFactory,
      Escalator escalator
  )
  {
    this.warehouse = warehouse;
    this.jsonMapper = jsonMapper;
    this.smileMapper = smileMapper;
    this.hostFinder = hostFinder;
    this.httpClientProvider = httpClientProvider;
    this.httpClientConfig = httpClientConfig;
    this.emitter = emitter;
    this.requestLogger = requestLogger;
    this.queryMetricsFactory = queryMetricsFactory;
    this.escalator = escalator;
  }

  @Override
  public void init() throws ServletException
  {
    super.init();

    // Note that httpClientProvider is setup to return same HttpClient instance on each get() so
    // it is same http client as that is used by parent ProxyServlet.
    broadcastClient = newHttpClient();
    try {
      broadcastClient.start();
    }
    catch (Exception e) {
      throw new ServletException(e);
    }
  }

  @Override
  public void destroy()
  {
    super.destroy();
    try {
      broadcastClient.stop();
    }
    catch (Exception e) {
      log.warn(e, "Error stopping servlet");
    }
  }

  /**
   * Routes the request: Avatica requests go to the broker owning the connection; DELETEs on the
   * query endpoint are broadcast as cancellations; query POSTs are parsed and routed per query.
   * Routing results are stashed as request attributes, then handling is delegated to the parent
   * proxy servlet.
   */
  @Override
  protected void service(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException
  {
    final boolean isSmile = SmileMediaTypes.APPLICATION_JACKSON_SMILE.equals(request.getContentType())
                            || APPLICATION_SMILE.equals(request.getContentType());
    final ObjectMapper objectMapper = isSmile ? smileMapper : jsonMapper;
    request.setAttribute(OBJECTMAPPER_ATTRIBUTE, objectMapper);

    final Server defaultServer = hostFinder.getDefaultServer();
    request.setAttribute(HOST_ATTRIBUTE, defaultServer.getHost());
    request.setAttribute(SCHEME_ATTRIBUTE, defaultServer.getScheme());

    // The Router does not have the ability to look inside SQL queries and route them intelligently, so just treat
    // them as a generic request.
    final boolean isQueryEndpoint = request.getRequestURI().startsWith("/druid/v2")
                                    && !request.getRequestURI().startsWith("/druid/v2/sql");

    final boolean isAvatica = request.getRequestURI().startsWith("/druid/v2/sql/avatica");

    if (isAvatica) {
      // Route by Avatica connectionId so all requests of one connection reach the same broker.
      Map<String, Object> requestMap = objectMapper.readValue(request.getInputStream(), Map.class);
      String connectionId = getAvaticaConnectionId(requestMap);
      Server targetServer = hostFinder.findServerAvatica(connectionId);
      byte[] requestBytes = objectMapper.writeValueAsBytes(requestMap);

      request.setAttribute(HOST_ATTRIBUTE, targetServer.getHost());
      request.setAttribute(SCHEME_ATTRIBUTE, targetServer.getScheme());
      request.setAttribute(AVATICA_QUERY_ATTRIBUTE, requestBytes);
    } else if (isQueryEndpoint && HttpMethod.DELETE.is(request.getMethod())) {
      // query cancellation request
      for (final Server server : hostFinder.getAllServers()) {
        // send query cancellation to all brokers this query may have gone to
        // to keep the code simple, the proxy servlet will also send a request to one of the default brokers
        if (!server.getHost().equals(defaultServer.getHost())) {
          // issue async requests
          Response.CompleteListener completeListener = result -> {
            if (result.isFailed()) {
              log.warn(
                  result.getFailure(),
                  "Failed to forward cancellation request to [%s]",
                  server.getHost()
              );
            }
          };

          broadcastClient
              .newRequest(rewriteURI(request, server.getScheme(), server.getHost()))
              .method(HttpMethod.DELETE)
              .timeout(CANCELLATION_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS)
              .send(completeListener);
        }
        // NOTE(review): this increments once per known server (including the default one), not
        // once per cancelled query — confirm whether that over-counting is intentional.
        interruptedQueryCount.incrementAndGet();
      }
    } else if (isQueryEndpoint && HttpMethod.POST.is(request.getMethod())) {
      // query request
      try {
        Query inputQuery = objectMapper.readValue(request.getInputStream(), Query.class);
        if (inputQuery != null) {
          final Server server = hostFinder.getServer(inputQuery);
          request.setAttribute(HOST_ATTRIBUTE, server.getHost());
          request.setAttribute(SCHEME_ATTRIBUTE, server.getScheme());
          // Assign an id so downstream metrics/cancellation can reference the query.
          if (inputQuery.getId() == null) {
            inputQuery = inputQuery.withId(UUID.randomUUID().toString());
          }
        }
        request.setAttribute(QUERY_ATTRIBUTE, inputQuery);
      }
      catch (IOException e) {
        // Malformed query JSON: log it, respond 400, and stop before proxying.
        log.warn(e, "Exception parsing query");
        final String errorMessage = e.getMessage() == null ? "no error message" : e.getMessage();
        requestLogger.log(
            new RequestLogLine(
                DateTimes.nowUtc(),
                request.getRemoteAddr(),
                null,
                new QueryStats(ImmutableMap.<String, Object>of("success", false, "exception", errorMessage))
            )
        );
        response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
        response.setContentType(MediaType.APPLICATION_JSON);
        objectMapper.writeValue(
            response.getOutputStream(),
            ImmutableMap.of("error", errorMessage)
        );

        return;
      }
      catch (Exception e) {
        handleException(response, objectMapper, e);
        return;
      }
    }

    super.service(request, response);
  }

  @Override
  protected void sendProxyRequest(
      HttpServletRequest clientRequest,
      HttpServletResponse proxyResponse,
      Request proxyRequest
  )
  {
    proxyRequest.timeout(httpClientConfig.getReadTimeout().getMillis(), TimeUnit.MILLISECONDS);
    proxyRequest.idleTimeout(httpClientConfig.getReadTimeout().getMillis(), TimeUnit.MILLISECONDS);

    // The request body was consumed in service(); re-attach it from the stashed attributes.
    byte[] avaticaQuery = (byte[]) clientRequest.getAttribute(AVATICA_QUERY_ATTRIBUTE);
    if (avaticaQuery != null) {
      proxyRequest.content(new BytesContentProvider(avaticaQuery));
    }

    final Query query = (Query) clientRequest.getAttribute(QUERY_ATTRIBUTE);
    if (query != null) {
      final ObjectMapper objectMapper = (ObjectMapper) clientRequest.getAttribute(OBJECTMAPPER_ATTRIBUTE);
      try {
        proxyRequest.content(new BytesContentProvider(objectMapper.writeValueAsBytes(query)));
      }
      catch (JsonProcessingException e) {
        // NOTE(review): Throwables.propagate(e) does throw, but the idiomatic form is
        // "throw Throwables.propagate(e)" so the compiler can see the method never falls through.
        Throwables.propagate(e);
      }
    }

    // Since we can't see the request object on the remote side, we can't check whether the remote side actually
    // performed an authorization check here, so always set this to true for the proxy servlet.
    // If the remote node failed to perform an authorization check, PreResponseAuthorizationCheckFilter
    // will log that on the remote node.
    clientRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
    super.sendProxyRequest(
        clientRequest,
        proxyResponse,
        proxyRequest
    );
  }

  @Override
  protected Response.Listener newProxyResponseListener(
      HttpServletRequest request, HttpServletResponse response
  )
  {
    // Only query requests (QUERY_ATTRIBUTE set) get the metrics-emitting listener.
    final Query query = (Query) request.getAttribute(QUERY_ATTRIBUTE);
    if (query != null) {
      return newMetricsEmittingProxyResponseListener(request, response, query, System.nanoTime());
    } else {
      return super.newProxyResponseListener(request, response);
    }
  }

  @Override
  protected String rewriteTarget(HttpServletRequest request)
  {
    return rewriteURI(
        request,
        (String) request.getAttribute(SCHEME_ATTRIBUTE),
        (String) request.getAttribute(HOST_ATTRIBUTE)
    ).toString();
  }

  protected URI rewriteURI(HttpServletRequest request, String scheme, String host)
  {
    return makeURI(scheme, host, request.getRequestURI(), request.getQueryString());
  }

  /** Builds the proxied URI; the raw query string is URL-decoded before being re-encoded by URI. */
  protected static URI makeURI(String scheme, String host, String requestURI, String rawQueryString)
  {
    try {
      return new URI(
          scheme,
          host,
          requestURI,
          rawQueryString == null ? null : URLDecoder.decode(rawQueryString, "UTF-8"),
          null
      );
    }
    catch (UnsupportedEncodingException | URISyntaxException e) {
      log.error(e, "Unable to rewrite URI [%s]", e.getMessage());
      throw Throwables.propagate(e);
    }
  }

  @Override
  protected HttpClient newHttpClient()
  {
    return escalator.createEscalatedJettyClient(httpClientProvider.get());
  }

  @Override
  protected HttpClient createHttpClient() throws ServletException
  {
    HttpClient client = super.createHttpClient();
    // override timeout set in ProxyServlet.createHttpClient
    setTimeout(httpClientConfig.getReadTimeout().getMillis());
    return client;
  }

  private Response.Listener newMetricsEmittingProxyResponseListener(
      HttpServletRequest request,
      HttpServletResponse response,
      Query query,
      long startNs
  )
  {
    return new MetricsEmittingProxyResponseListener(request, response, query, startNs);
  }

  @Override
  public long getSuccessfulQueryCount()
  {
    return successfulQueryCount.get();
  }

  @Override
  public long getFailedQueryCount()
  {
    return failedQueryCount.get();
  }

  @Override
  public long getInterruptedQueryCount()
  {
    return interruptedQueryCount.get();
  }

  /** Extracts and validates the "connectionId" field of an Avatica request map. */
  private static String getAvaticaConnectionId(Map<String, Object> requestMap) throws IOException
  {
    Object connectionIdObj = requestMap.get("connectionId");
    if (connectionIdObj == null) {
      throw new IAE("Received an Avatica request without a connectionId.");
    }
    if (!(connectionIdObj instanceof String)) {
      throw new IAE("Received an Avatica request with a non-String connectionId.");
    }

    return (String) connectionIdObj;
  }

  /**
   * Proxy response listener that records per-query counters, emits query-time metrics, and writes
   * request-log lines on completion or failure.
   */
  private class MetricsEmittingProxyResponseListener extends ProxyResponseListener
  {
    private final HttpServletRequest req;
    private final HttpServletResponse res;
    private final Query query;
    private final long startNs;

    public MetricsEmittingProxyResponseListener(
        HttpServletRequest request,
        HttpServletResponse response,
        Query query,
        long startNs
    )
    {
      super(request, response);

      this.req = request;
      this.res = response;
      this.query = query;
      this.startNs = startNs;
    }

    @Override
    public void onComplete(Result result)
    {
      final long requestTimeNs = System.nanoTime() - startNs;
      try {
        boolean success = result.isSucceeded();
        if (success) {
          successfulQueryCount.incrementAndGet();
        } else {
          failedQueryCount.incrementAndGet();
        }
        emitQueryTime(requestTimeNs, success);
        requestLogger.log(
            new RequestLogLine(
                DateTimes.nowUtc(),
                req.getRemoteAddr(),
                query,
                new QueryStats(
                    ImmutableMap.<String, Object>of(
                        "query/time",
                        TimeUnit.NANOSECONDS.toMillis(requestTimeNs),
                        "success",
                        success
                        && result.getResponse().getStatus() == javax.ws.rs.core.Response.Status.OK.getStatusCode()
                    )
                )
            )
        );
      }
      catch (Exception e) {
        log.error(e, "Unable to log query [%s]!", query);
      }

      super.onComplete(result);
    }

    @Override
    public void onFailure(Response response, Throwable failure)
    {
      try {
        final String errorMessage = failure.getMessage();
        failedQueryCount.incrementAndGet();
        emitQueryTime(System.nanoTime() - startNs, false);
        requestLogger.log(
            new RequestLogLine(
                DateTimes.nowUtc(),
                req.getRemoteAddr(),
                query,
                new QueryStats(
                    ImmutableMap.<String, Object>of(
                        "success",
                        false,
                        "exception",
                        errorMessage == null ? "no message" : errorMessage
                    )
                )
            )
        );
      }
      catch (IOException logError) {
        log.error(logError, "Unable to log query [%s]!", query);
      }

      log.makeAlert(failure, "Exception handling request")
         .addData("exception", failure.toString())
         .addData("query", query)
         .addData("peer", req.getRemoteAddr())
         .emit();

      super.onFailure(response, failure);
    }

    private void emitQueryTime(long requestTimeNs, boolean success) throws JsonProcessingException
    {
      QueryMetrics queryMetrics = DruidMetrics.makeRequestMetrics(
          queryMetricsFactory,
          warehouse.getToolChest(query),
          query,
          req.getRemoteAddr()
      );
      queryMetrics.success(success);
      queryMetrics.reportQueryTime(requestTimeNs).emit(emitter);
    }
  }
}
/*

   Derby - Class org.apache.derby.iapi.sql.dictionary.DefaultDescriptor

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to you under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package org.apache.derby.iapi.sql.dictionary;

import org.apache.derby.catalog.Dependable;
import org.apache.derby.catalog.DependableFinder;
import org.apache.derby.catalog.UUID;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.reference.SQLState;
import org.apache.derby.iapi.services.i18n.MessageService;
import org.apache.derby.iapi.services.io.StoredFormatIds;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.iapi.sql.conn.LanguageConnectionContext;
import org.apache.derby.iapi.sql.depend.DependencyManager;
import org.apache.derby.iapi.sql.depend.Dependent;
import org.apache.derby.iapi.sql.depend.Provider;

/**
 * This interface is used to get information from a DefaultDescriptor.
 *
 * A DefaultDescriptor identifies a column default: it ties the default's UUID
 * to the table's UUID and the 1-based column position, and participates in the
 * dependency system both as a Provider (things can depend on the default) and
 * as a Dependent.
 */
public final class DefaultDescriptor
	extends UniqueTupleDescriptor
	implements Provider, Dependent
{
	private final int			columnNumber;
	private final UUID			defaultUUID;
	private final UUID			tableUUID;

	/**
	 * Constructor for a DefaultDescriptor
	 *
	 * @param dataDictionary    the DD
	 * @param defaultUUID		The UUID of the default
	 * @param tableUUID			The UUID of the table
	 * @param columnNumber		The column number of the column that the default is for
	 */
	public DefaultDescriptor(DataDictionary dataDictionary, UUID defaultUUID, UUID tableUUID, int columnNumber)
	{
		super( dataDictionary );

		this.defaultUUID = defaultUUID;
		this.tableUUID = tableUUID;
		this.columnNumber = columnNumber;
	}

	/**
	 * Get the UUID of the default.
	 *
	 * @return	The UUID of the default.
	 */
	public UUID	getUUID()
	{
		return defaultUUID;
	}

	/**
	 * Get the UUID of the table.
	 *
	 * @return	The UUID of the table.
	 */
	public UUID	getTableUUID()
	{
		return tableUUID;
	}

	/**
	 * Get the column number of the column.
	 *
	 * @return	The column number of the column.
	 */
	public int	getColumnNumber()
	{
		return columnNumber;
	}

	/**
	 * Convert the DefaultDescriptor to a String.
	 *
	 * @return	A String representation of this DefaultDescriptor
	 */
	public String	toString()
	{
		if (SanityManager.DEBUG)
		{
			/*
			** NOTE: This does not format table, because table.toString()
			** formats columns, leading to infinite recursion.
			*/
			return "defaultUUID: " + defaultUUID + "\n" +
				"tableUUID: " + tableUUID + "\n" +
				"columnNumber: " + columnNumber + "\n";
		}
		else
		{
			return "";
		}
	}

	////////////////////////////////////////////////////////////////////
	//
	// PROVIDER INTERFACE
	//
	////////////////////////////////////////////////////////////////////

	/**
		@return the stored form of this provider

		@see Dependable#getDependableFinder
	 */
	public DependableFinder getDependableFinder()
	{
		return getDependableFinder(StoredFormatIds.DEFAULT_DESCRIPTOR_FINDER_V01_ID);
	}

	/**
	 * Return the name of this Provider.  (Useful for errors.)
	 *
	 * @return String	The name of this provider.
	 */
	public String getObjectName()
	{
		return "default";
	}

	/**
	 * Get the provider's UUID
	 *
	 * @return 	The provider's UUID
	 */
	public UUID getObjectID()
	{
		return defaultUUID;
	}

	/**
	 * Get the provider's type.
	 *
	 * @return char		The provider's type.
	 */
	public String getClassType()
	{
		return Dependable.DEFAULT;
	}

	//////////////////////////////////////////////////////
	//
	// DEPENDENT INTERFACE
	//
	//////////////////////////////////////////////////////

	/**
	 * Check that all of the dependent's dependencies are valid.
	 *
	 * @return true if the dependent is currently valid
	 */
	public synchronized boolean isValid()
	{
		// A default descriptor has no dependencies that can become stale.
		return true;
	}

	/**
	 * Prepare to mark the dependent as invalid (due to at least one of
	 * its dependencies being invalid).
	 *
	 * @param action	The action causing the invalidation
	 * @param p			the provider
	 *
	 * @exception StandardException thrown if unable to make it invalid
	 */
	public void prepareToInvalidate(Provider p, int action,
					LanguageConnectionContext lcc)
		throws StandardException
	{
		DependencyManager dm = getDataDictionary().getDependencyManager();

		switch (action)
		{
			/*
			** Currently, the only thing we are dependent
			** on is an alias.
			*/
		    default:
				// Look up the owning column/table so the error message can name them.
				DataDictionary dd = getDataDictionary();
				ColumnDescriptor cd = dd.getColumnDescriptorByDefaultId(defaultUUID);
				TableDescriptor td = dd.getTableDescriptor(cd.getReferencingUUID());

				throw StandardException.newException(SQLState.LANG_PROVIDER_HAS_DEPENDENT_OBJECT,
					dm.getActionString(action),
					p.getObjectName(),
					"DEFAULT",
					td.getQualifiedName() + "." +
					cd.getColumnName());
		}
	}

	/**
	 * Mark the dependent as invalid (due to at least one of
	 * its dependencies being invalid).  Always an error
	 * for a constraint -- should never have gotten here.
	 *
	 * @param	action	The action causing the invalidation
	 *
	 * @exception StandardException thrown if called in sanity mode
	 */
	public void makeInvalid(int action, LanguageConnectionContext lcc)
		throws StandardException
	{
		/*
		** We should never get here, we should have barfed on
		** prepareToInvalidate().
		*/
		if (SanityManager.DEBUG)
		{
			DependencyManager dm;

			dm = getDataDictionary().getDependencyManager();

			SanityManager.THROWASSERT("makeInvalid("+
				dm.getActionString(action)+
				") not expected to get called");
		}
	}
}
/**
 * Copyright (c) 2020, the Alpha Team.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1) Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * 2) Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package at.ac.tuwien.kr.alpha.api.externals.stdlib;

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.List;
import java.util.Set;

import at.ac.tuwien.kr.alpha.api.externals.Predicate;
import at.ac.tuwien.kr.alpha.common.terms.ConstantTerm;
import at.ac.tuwien.kr.alpha.common.terms.Terms;

/**
 * Collection of methods that can be used as external atoms from ASP programs.
 * Provides commonly used functionality such as basic string operations,
 * datetime handling etc.
 *
 * All functions exposed by this class are guaranteed to be stateless and
 * side-effect free.
 *
 * Copyright (c) 2020, the Alpha Team.
 */
public final class AspStandardLibrary {

	private AspStandardLibrary() {
		throw new AssertionError(this.getClass().getSimpleName() + " is a non-instantiable utility class!");
	}

	/**
	 * Parses a string representing a datetime without time-zone and returns the
	 * year, month, day, hours, minutes and seconds as separate symbolic integer
	 * terms.
	 * Example:
	 *
	 * <pre>
	 * A valid ground instance of the atom &stdlib_datetime_parse[DTSTR, "dd.mm.yyyy hh:MM:ss"](YEAR, MONTH, DAY, HOUR, MIN, SEC)
	 * would be: &stdlib_datetime_parse["20.05.2020 01:19:13", "dd.mm.yyyy hh:MM:ss"](2020, 5, 20, 1, 19, 13)
	 * </pre>
	 *
	 * Timezones are not supported by this function. Datetime values are parsed
	 * using {@link LocalDateTime#parse(CharSequence, DateTimeFormatter)}.
	 *
	 * @param dtstr  a string representing a datetime without time zone information
	 * @param format a format string that is accepted by {@link DateTimeFormatter}
	 * @return a 6-value integer tuple of format (YEAR, MONTH, DAY, HOUR, MIN, SEC)
	 */
	@Predicate(name = "stdlib_datetime_parse")
	public static Set<List<ConstantTerm<Integer>>> datetimeParse(String dtstr, String format) {
		DateTimeFormatter formatter = DateTimeFormatter.ofPattern(format);
		LocalDateTime datetime = LocalDateTime.parse(dtstr, formatter);
		// getMonthValue() yields the 1-12 month number directly (equivalent to
		// getMonth().getValue(), without the intermediate Month object).
		List<ConstantTerm<Integer>> terms = Terms.asTermList(
				datetime.getYear(), datetime.getMonthValue(), datetime.getDayOfMonth(),
				datetime.getHour(), datetime.getMinute(), datetime.getSecond());
		return Collections.singleton(terms);
	}

	/**
	 * Compares two datetimes and returns true iff the first datetime (dt1) is
	 * before the second datetime (dt2). Both datetimes are represented as six
	 * integers each, referring to years, months, days, hours, minutes and seconds
	 * respectively.
	 *
	 * @param dt1Year   the year field for dt1
	 * @param dt1Month  the month field for dt1
	 * @param dt1Day    the day field for dt1
	 * @param dt1Hour   the hour field for dt1
	 * @param dt1Minute the minute field for dt1
	 * @param dt1Second the second field for dt1
	 * @param dt2Year   the year field for dt2
	 * @param dt2Month  the month field for dt2
	 * @param dt2Day    the day field for dt2
	 * @param dt2Hour   the hour field for dt2
	 * @param dt2Minute the minute field for dt2
	 * @param dt2Second the second field for dt2
	 * @return true if dt1 is before dt2 in time, false otherwise
	 */
	@Predicate(name = "stdlib_datetime_is_before")
	public static boolean datetimeIsBefore(int dt1Year, int dt1Month, int dt1Day, int dt1Hour, int dt1Minute, int dt1Second,
			int dt2Year, int dt2Month, int dt2Day, int dt2Hour, int dt2Minute, int dt2Second) {
		LocalDateTime dt1 = LocalDateTime.of(dt1Year, dt1Month, dt1Day, dt1Hour, dt1Minute, dt1Second);
		LocalDateTime dt2 = LocalDateTime.of(dt2Year, dt2Month, dt2Day, dt2Hour, dt2Minute, dt2Second);
		return dt1.isBefore(dt2);
	}

	/**
	 * Compares two datetimes and returns true iff the first datetime (dt1) is
	 * equal to the second datetime (dt2). Both datetimes are represented as six
	 * integers each, referring to years, months, days, hours, minutes and seconds
	 * respectively.
	 *
	 * @param dt1Year   the year field for dt1
	 * @param dt1Month  the month field for dt1
	 * @param dt1Day    the day field for dt1
	 * @param dt1Hour   the hour field for dt1
	 * @param dt1Minute the minute field for dt1
	 * @param dt1Second the second field for dt1
	 * @param dt2Year   the year field for dt2
	 * @param dt2Month  the month field for dt2
	 * @param dt2Day    the day field for dt2
	 * @param dt2Hour   the hour field for dt2
	 * @param dt2Minute the minute field for dt2
	 * @param dt2Second the second field for dt2
	 * @return true if dt1 is equal to dt2, false otherwise
	 */
	@Predicate(name = "stdlib_datetime_is_equal")
	public static boolean datetimeIsEqual(int dt1Year, int dt1Month, int dt1Day, int dt1Hour, int dt1Minute, int dt1Second,
			int dt2Year, int dt2Month, int dt2Day, int dt2Hour, int dt2Minute, int dt2Second) {
		LocalDateTime dt1 = LocalDateTime.of(dt1Year, dt1Month, dt1Day, dt1Hour, dt1Minute, dt1Second);
		LocalDateTime dt2 = LocalDateTime.of(dt2Year, dt2Month, dt2Day, dt2Hour, dt2Minute, dt2Second);
		return dt1.isEqual(dt2);
	}

	/**
	 * Compares two datetimes and returns true iff the first datetime (dt1) is
	 * before or equal to the second datetime (dt2). Both datetimes are represented
	 * as six integers each, referring to years, months, days, hours, minutes and
	 * seconds respectively.
	 *
	 * @param dt1Year   the year field for dt1
	 * @param dt1Month  the month field for dt1
	 * @param dt1Day    the day field for dt1
	 * @param dt1Hour   the hour field for dt1
	 * @param dt1Minute the minute field for dt1
	 * @param dt1Second the second field for dt1
	 * @param dt2Year   the year field for dt2
	 * @param dt2Month  the month field for dt2
	 * @param dt2Day    the day field for dt2
	 * @param dt2Hour   the hour field for dt2
	 * @param dt2Minute the minute field for dt2
	 * @param dt2Second the second field for dt2
	 * @return true if dt1 is before dt2 in time or both dt1 and dt2 denote the same
	 *         point in time, false otherwise
	 */
	@Predicate(name = "stdlib_datetime_is_before_or_equal")
	public static boolean datetimeIsBeforeOrEqual(int dt1Year, int dt1Month, int dt1Day, int dt1Hour, int dt1Minute, int dt1Second,
			int dt2Year, int dt2Month, int dt2Day, int dt2Hour, int dt2Minute, int dt2Second) {
		LocalDateTime dt1 = LocalDateTime.of(dt1Year, dt1Month, dt1Day, dt1Hour, dt1Minute, dt1Second);
		LocalDateTime dt2 = LocalDateTime.of(dt2Year, dt2Month, dt2Day, dt2Hour, dt2Minute, dt2Second);
		return dt1.isBefore(dt2) || dt1.isEqual(dt2);
	}

	/**
	 * Formats a datetime value represented using six integers as a string according
	 * to the given pattern. Valid format strings are those accepted by
	 * {@link DateTimeFormatter#ofPattern(String)}.
	 *
	 * @param year    the year field of the datetime to format
	 * @param month   the month field (1-12) of the datetime to format
	 * @param day     the day-of-month field of the datetime to format
	 * @param hours   the hour field of the datetime to format
	 * @param minutes the minute field of the datetime to format
	 * @param seconds the second field of the datetime to format
	 * @param format  the pattern to format the datetime with
	 * @return a string representing the given datetime in the format specified by
	 *         the format string
	 */
	@Predicate(name = "stdlib_datetime_to_string")
	public static Set<List<ConstantTerm<String>>> datetimeToString(int year, int month, int day, int hours, int minutes, int seconds,
			String format) {
		LocalDateTime datetime = LocalDateTime.of(year, month, day, hours, minutes, seconds);
		DateTimeFormatter formatter = DateTimeFormatter.ofPattern(format);
		return Collections.singleton(Terms.asTermList(formatter.format(datetime)));
	}

	/**
	 * Checks whether the given string matches the given regex.
	 *
	 * @param str   the string to test
	 * @param regex the regular expression to match against (full-string match)
	 * @return true iff {@code str} matches {@code regex} entirely
	 */
	@Predicate(name = "stdlib_string_matches_regex")
	public static boolean stringMatchesRegex(String str, String regex) {
		return str.matches(regex);
	}

	/**
	 * Returns the length of the given string.
	 *
	 * @param str the string whose length to report
	 * @return a singleton set containing the one-element term list (LENGTH)
	 */
	@Predicate(name = "stdlib_string_length")
	public static Set<List<ConstantTerm<Integer>>> stringLength(String str) {
		return Collections.singleton(Terms.asTermList(str.length()));
	}

	/**
	 * Concatenates the two given strings.
	 *
	 * @param s1 the first (left) string
	 * @param s2 the second (right) string
	 * @return a singleton set containing the one-element term list (S1 + S2)
	 */
	@Predicate(name = "stdlib_string_concat")
	public static Set<List<ConstantTerm<String>>> stringConcat(String s1, String s2) {
		return Collections.singleton(Terms.asTermList(s1 + s2));
	}

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.any23.extractor.html;

import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.traversal.DocumentTraversal;
import org.w3c.dom.traversal.NodeFilter;
import org.w3c.dom.traversal.NodeIterator;

import javax.xml.transform.OutputKeys;
import javax.xml.transform.Result;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

/**
 * This class provides utility methods for DOM
manipulation.
 * It is separated from {@link HTMLDocument} so that its methods
 * can be run on single DOM nodes without having to wrap them
 * into an HTMLDocument.
 * We use a mix of XPath and DOM manipulation.
 * <p/>
 * This is likely to be a performance bottleneck but at least
 * everything is localized here.
 * <p/>
 */
public class DomUtils {

    private static final String[] EMPTY_STRING_ARRAY = new String[0];

    // Shared XPath engine, compiled once.
    // NOTE(review): javax.xml.xpath.XPath implementations are not guaranteed
    // thread-safe; verify this class is not used concurrently.
    private final static XPath xPathEngine = XPathFactory.newInstance().newXPath();

    /** Utility class; not instantiable. */
    private DomUtils(){}

    /**
     * Given a node this method returns the index corresponding to such node
     * within the list of the children of its parent node. Only siblings of the
     * same node type and tag name are counted.
     *
     * @param n the node of which returning the index.
     * @return a non negative number.
     */
    public static int getIndexInParent(Node n) {
        Node parent = n.getParentNode();
        if (parent == null) {
            // A detached node (or the document itself) has no siblings.
            return 0;
        }
        NodeList nodes = parent.getChildNodes();
        int counter = -1;
        for (int i = 0; i < nodes.getLength(); i++) {
            Node current = nodes.item(i);
            // Only same-type, same-name siblings advance the index, mirroring
            // XPath sibling-position semantics used by getXPathForNode().
            if (current.getNodeType() == n.getNodeType()
                    && current.getNodeName().equals(n.getNodeName())) {
                counter++;
            }
            if (current.equals(n)) {
                return counter;
            }
        }
        throw new IllegalStateException("Cannot find a child within its parent node list.");
    }

    /**
     * Does a reverse walking of the DOM tree to generate a unique XPath
     * expression leading to this node. The XPath generated is the canonical
     * one based on sibling index: /html[1]/body[1]/div[2]/span[3] etc..
     *
     * @param node the input node.
     * @return the XPath location of node as String.
     */
    public static String getXPathForNode(Node node) {
        final StringBuilder sb = new StringBuilder();
        Node parent = node;
        // Walk upwards, prepending one /name[index] step per ancestor.
        while (parent != null && parent.getNodeType() != Node.DOCUMENT_NODE) {
            sb.insert(0, "]");
            sb.insert(0, getIndexInParent(parent) + 1); // XPath indices are 1-based.
            sb.insert(0, "[");
            sb.insert(0, parent.getNodeName());
            sb.insert(0, "/");
            parent = parent.getParentNode();
        }
        return sb.toString();
    }

    /**
     * Returns a list of tag names representing the path from
     * the document root to the given node <i>n</i>, each decorated with its
     * sibling index as <code>name[index]</code>.
     * NOTE(review): unlike {@link #getXPathForNode(Node)}, the index here is
     * the raw 0-based value of {@link #getIndexInParent(Node)} — confirm the
     * inconsistency is intentional before changing either.
     *
     * @param n the node for which retrieve the path.
     * @return a sequence of HTML tag names.
     */
    public static String[] getXPathListForNode(Node n) {
        if (n == null) {
            return EMPTY_STRING_ARRAY;
        }
        List<String> ancestors = new ArrayList<String>();
        ancestors.add( String.format("%s[%s]", n.getNodeName(), getIndexInParent(n) ) );
        Node parent = n.getParentNode();
        while (parent != null) {
            ancestors.add(0, String.format("%s[%s]", parent.getNodeName(), getIndexInParent(parent) ) );
            parent = parent.getParentNode();
        }
        return ancestors.toArray( new String[ancestors.size()] );
    }

    /**
     * Returns the row/col location of the given node, as recorded by the
     * parser in the node's user data.
     *
     * @param n input node.
     * @return an array of four elements of type
     *         <code>[&lt;begin-row&gt;, &lt;begin-col&gt;, &lt;end-row&gt;, &lt;end-col&gt;]</code>
     *         or <code>null</code> if not possible to extract such data.
     */
    public static int[] getNodeLocation(Node n) {
        if (n == null) throw new NullPointerException("node cannot be null.");
        final TagSoupParser.ElementLocation elementLocation =
            (TagSoupParser.ElementLocation) n.getUserData( TagSoupParser.ELEMENT_LOCATION );
        if (elementLocation == null) return null;
        return new int[]{
                elementLocation.getBeginLineNumber(),
                elementLocation.getBeginColumnNumber(),
                elementLocation.getEndLineNumber(),
                elementLocation.getEndColumnNumber()
        };
    }

    /**
     * Checks whether a node is ancestor or same of another node.
     *
     * @param candidateAncestor the candidate ancestor node.
     * @param candidateSibling the candidate descendant node.
     * @param strict if <code>true</code> is not allowed that the ancestor and sibling can be the same node.
     * @return <code>true</code> if <code>candidateAncestor</code> is ancestor of <code>candidateSibling</code>,
     *         <code>false</code> otherwise.
     */
    public static boolean isAncestorOf(Node candidateAncestor, Node candidateSibling, boolean strict) {
        if (candidateAncestor == null) throw new NullPointerException("candidate ancestor cannot be null null.");
        if (candidateSibling == null) throw new NullPointerException("candidate sibling cannot be null null." );
        if (strict && candidateAncestor.equals(candidateSibling)) return false;
        // Walk up from the candidate descendant; non-strict mode matches the node itself first.
        Node parent = candidateSibling;
        while (parent != null) {
            if (parent.equals(candidateAncestor)) return true;
            parent = parent.getParentNode();
        }
        return false;
    }

    /**
     * Checks whether a node is ancestor or same of another node. As
     * {@link #isAncestorOf(org.w3c.dom.Node, org.w3c.dom.Node, boolean)} with <code>strict=false</code>.
     *
     * @param candidateAncestor the candidate ancestor node.
     * @param candidateSibling the candidate descendant node.
     * @return <code>true</code> if <code>candidateAncestor</code> is ancestor of <code>candidateSibling</code>,
     *         <code>false</code> otherwise.
     */
    public static boolean isAncestorOf(Node candidateAncestor, Node candidateSibling) {
        return isAncestorOf(candidateAncestor, candidateSibling, false);
    }

    /**
     * Finds all nodes that have a declared class.
     * Note that the className is transformed to lower case before being
     * matched against the DOM.
     *
     * @param root the root node from which start searching.
     * @param className the name of the filtered class.
     * @return list of matching nodes or an empty list.
     */
    public static List<Node> findAllByClassName(Node root, String className) {
        return findAllBy(root, null, "class", className.toLowerCase());
    }

    /**
     * Finds all nodes that have a declared attribute.
     *
     * @param root the root node from which start searching.
     * @param attrName the name of the filtered attribue.
     * @return list of matching nodes or an empty list.
     */
    public static List<Node> findAllByAttributeName(Node root, String attrName) {
        return findAllBy(root, null, attrName, null);
    }

    /**
     * Finds all nodes whose attribute <code>attrName</code> contains the
     * whitespace-separated token <code>attrContains</code>.
     *
     * @param node the root node from which start searching.
     * @param attrName the name of the filtered attribute.
     * @param attrContains token the attribute value must contain ("*" matches any).
     * @return list of matching nodes or an empty list.
     */
    public static List<Node> findAllByAttributeContains(Node node, String attrName, String attrContains) {
        return findAllBy(node, null, attrName, attrContains);
    }

    /**
     * Finds all nodes with the given tag name ("*" matches any).
     *
     * @param root the root node from which start searching.
     * @param tagName the tag name to match.
     * @return list of matching nodes or an empty list.
     */
    public static List<Node> findAllByTag(Node root, String tagName) {
        return findAllBy(root, tagName, null, null);
    }

    /**
     * Finds all nodes with the given tag name carrying the given CSS class.
     *
     * @param root the root node from which start searching.
     * @param tagName the tag name to match.
     * @param className the class token to match.
     * @return list of matching nodes or an empty list.
     */
    public static List<Node> findAllByTagAndClassName(Node root, final String tagName, final String className) {
        return findAllBy(root, tagName, "class", className);
    }

    /**
     * Mimics the JS DOM API, or prototype's $().
     *
     * @param root the root node from which start searching.
     * @param id the id of the element to find.
     * @return the matching node, or <code>null</code> if none matches.
     */
    public static Node findNodeById(Node root, String id) {
        Node node;
        try {
            String xpath = "//*[@id='" + id + "']";
            node = (Node) xPathEngine.evaluate(xpath, root, XPathConstants.NODE);
        } catch (XPathExpressionException ex) {
            throw new RuntimeException("Should not happen", ex);
        }
        return node;
    }

    /**
     * Returns a list composed of all the nodes that match an XPath
     * expression, which must be valid.
     *
     * @param node the context node for the evaluation.
     * @param xpath the XPath expression to evaluate.
     * @return the matching nodes, possibly empty.
     * @throws IllegalArgumentException if <code>xpath</code> is not a valid expression.
     */
    public static List<Node> findAll(Node node, String xpath) {
        if (node == null) {
            throw new NullPointerException("node cannot be null.");
        }
        try {
            NodeList nodes = (NodeList) xPathEngine.evaluate(xpath, node, XPathConstants.NODESET);
            List<Node> result = new ArrayList<Node>(nodes.getLength());
            for (int i = 0; i < nodes.getLength(); i++) {
                result.add(nodes.item(i));
            }
            return result;
        } catch (XPathExpressionException ex) {
            throw new IllegalArgumentException("Illegal XPath expression: " + xpath, ex);
        }
    }

    /**
     * Gets the string value of an XPath expression, never <code>null</code>.
     *
     * @param node the context node for the evaluation.
     * @param xpath the XPath expression to evaluate.
     * @return the string result, or <code>""</code> when the evaluation yields nothing.
     * @throws IllegalArgumentException if <code>xpath</code> is not a valid expression.
     */
    public static String find(Node node, String xpath) {
        try {
            String val = (String) xPathEngine.evaluate(xpath, node, XPathConstants.STRING);
            if (null == val) {
                return "";
            }
            return val;
        } catch (XPathExpressionException ex) {
            throw new IllegalArgumentException("Illegal XPath expression: " + xpath, ex);
        }
    }

    /**
     * Tells if an element has a class name <b>not checking the parents
     * in the hierarchy</b> mimicking the <i>CSS</i> .foo match.
     *
     * @param node the node to check.
     * @param className the class token to look for.
     * @return <code>true</code> iff the node's class attribute contains the token.
     */
    public static boolean hasClassName(Node node, String className) {
        return hasAttribute(node, "class", className);
    }

    /**
     * Checks the presence of an attribute value in attributes that
     * contain whitespace-separated lists of values. The semantic is the
     * CSS classes' ones: "foo" matches "bar foo", "foo" but not "foob".
     *
     * @param node the node to check.
     * @param attributeName the attribute holding the token list.
     * @param className the token to look for (case-insensitive).
     * @return <code>true</code> iff the token is present.
     */
    public static boolean hasAttribute(Node node, String attributeName, String className) {
        // regex love, maybe faster but less easy to understand
        // Pattern pattern = Pattern.compile("(^|\\s+)"+className+"(\\s+|$)");
        String attr = readAttribute(node, attributeName);
        for (String c : attr.split("\\s+")) {
            if (c.equalsIgnoreCase(className)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks the presence of an attribute in the given <code>node</code>.
     *
     * @param node the node container.
     * @param attributeName the name of the attribute.
     * @return <code>true</code> iff the attribute is declared on the node.
     */
    public static boolean hasAttribute(Node node, String attributeName) {
        return readAttribute(node, attributeName, null) != null;
    }

    /**
     * Verifies if the given target node is an element.
     *
     * @param target the node to check.
     * @return <code>true</code> if the node is an element,
     *         <code>false</code> otherwise.
     */
    public static boolean isElementNode(Node target) {
        return Node.ELEMENT_NODE == target.getNodeType();
    }

    /**
     * Reads the value of the specified <code>attribute</code>, returning the
     * <code>defaultValue</code> string if not present.
     *
     * @param node node to read the attribute.
     * @param attribute attribute name.
     * @param defaultValue the default value to return if attribute is not found.
     * @return the attribute value or <code>defaultValue</code> if not found.
     */
    public static String readAttribute(Node node, String attribute, String defaultValue) {
        NamedNodeMap attributes = node.getAttributes();
        if (null == attributes) {
            return defaultValue;
        }
        Node attr = attributes.getNamedItem(attribute);
        if (null == attr) {
            return defaultValue;
        }
        return attr.getNodeValue();
    }

    /**
     * Reads the value of the first <i>attribute</i> which name matches with the specified <code>attributePrefix</code>.
     * Returns the <code>defaultValue</code> if not found.
     *
     * @param node node to look for attributes.
     * @param attributePrefix attribute prefix.
     * @param defaultValue default returned value.
     * @return the value found or default.
     */
    public static String readAttributeWithPrefix(Node node, String attributePrefix, String defaultValue) {
        final NamedNodeMap attributes = node.getAttributes();
        if (null == attributes) {
            return defaultValue;
        }
        Node attribute;
        for (int a = 0; a < attributes.getLength(); a++) {
            attribute = attributes.item(a);
            if (attribute.getNodeName().startsWith(attributePrefix)) {
                return attribute.getNodeValue();
            }
        }
        return defaultValue;
    }

    /**
     * Reads the value of an <code>attribute</code>, returning the
     * empty string if not present.
     *
     * @param node node to read the attribute.
     * @param attribute attribute name.
     * @return the attribute value or <code>""</code> if not found.
     */
    public static String readAttribute(Node node, String attribute) {
        return readAttribute(node, attribute, "");
    }

    /**
     * Given a <i>DOM</i> {@link Node} produces the <i>XML</i> serialization
     * omitting the <i>XML declaration</i>.
     *
     * @param node node to be serialized.
     * @param indent if <code>true</code> the output is indented.
     * @return the XML serialization.
     * @throws TransformerException if an error occurs during the
     *         serializator initialization and activation.
     * @throws java.io.IOException if an error occurs while closing the writer.
     */
    public static String serializeToXML(Node node, boolean indent) throws TransformerException, IOException {
        final DOMSource domSource = new DOMSource(node);
        final Transformer transformer = TransformerFactory.newInstance().newTransformer();
        transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
        transformer.setOutputProperty(OutputKeys.METHOD, "xml");
        transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        if (indent) {
            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
            transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4");
        }
        final StringWriter sw = new StringWriter();
        final StreamResult sr = new StreamResult(sw);
        transformer.transform(domSource, sr);
        sw.close();
        return sw.toString();
    }

    /**
     * High performance implementation of {@link #findAll(org.w3c.dom.Node, String)}.
     *
     * @param root root node to start search.
     * @param tagName name of target tag, <code>null</code> or "*" to match any.
     * @param attrName name of attribute filter, <code>null</code> to skip attribute filtering.
     * @param attrContains expected token within the attribute value,
     *        <code>null</code> or "*" to match any value.
     * @return list of matching nodes or an empty list.
     */
    private static List<Node> findAllBy(Node root, final String tagName, final String attrName, String attrContains) {
        DocumentTraversal documentTraversal = (DocumentTraversal) root.getOwnerDocument();
        if (documentTraversal == null) {
            // root is itself the Document.
            documentTraversal = (DocumentTraversal) root;
        }

        final Pattern attrContainsPattern;
        if (attrContains != null && !attrContains.equals("*")) {
            // CSS-token match: the value must appear as a whitespace-separated word.
            attrContainsPattern = Pattern.compile("(^|\\s)" + attrContains + "(\\s|$)", Pattern.CASE_INSENSITIVE);
        } else {
            attrContainsPattern = null;
        }

        // Matching nodes are accumulated via side effect inside the filter;
        // FILTER_ACCEPT is returned in every branch because acceptance of the
        // iterator is irrelevant — only the explicit result.add(node) matters.
        final List<Node> result = new ArrayList<Node>();
        NodeIterator nodeIterator = documentTraversal.createNodeIterator(
                root,
                NodeFilter.SHOW_ELEMENT,
                new NodeFilter() {
                    @Override
                    public short acceptNode(Node node) {
                        if (node.getNodeType() == Node.ELEMENT_NODE) {
                            if (tagName != null
                                    && !tagName.equals("*")
                                    && !tagName.equals(node.getNodeName())) {
                                // tagName given but doesn't match.
                                return FILTER_ACCEPT;
                            }
                            if (attrName != null) {
                                Node attrNameNode = node.getAttributes().getNamedItem(attrName);
                                if (attrNameNode == null) {
                                    // attrName given but doesn't match
                                    return FILTER_ACCEPT;
                                }
                                if (attrContainsPattern != null
                                        && !attrContainsPattern.matcher(attrNameNode.getNodeValue()).find()) {
                                    // attrContains given but doesn't match
                                    return FILTER_ACCEPT;
                                }
                            }
                            result.add(node);
                        }
                        return FILTER_ACCEPT;
                    }
                },
                false
        );

        // To populate result we only need to iterate...
        while (nodeIterator.nextNode() != null) ;

        // We have to explicitly declare we are done with this nodeIterator to free it's resources.
        nodeIterator.detach();

        return result;
    }

    /**
     * Given a {@link org.w3c.dom.Document} this method will return an
     * input stream representing that document.
     *
     * @param doc the input {@link org.w3c.dom.Document}
     * @return an {@link java.io.InputStream} over the UTF-8 serialization of the document.
     * @throws RuntimeException if the transformation fails; the original
     *         exception is preserved as the cause.
     */
    public static InputStream documentToInputStream(Document doc) {
        DOMSource source = new DOMSource(doc);
        StringWriter xmlAsWriter = new StringWriter();
        StreamResult result = new StreamResult(xmlAsWriter);
        try {
            TransformerFactory.newInstance().newTransformer().transform(source, result);
        } catch (TransformerConfigurationException | TransformerFactoryConfigurationError e) {
            // FIX: previously the cause was dropped, hiding the configuration failure.
            throw new RuntimeException("Error within Document to InputStream transformation configuration!", e);
        } catch (TransformerException e) {
            throw new RuntimeException("Error whilst transforming the Document to InputStream!", e);
        }
        // FIX: use the Charset overload instead of getBytes("UTF-8"), which
        // forced an unreachable UnsupportedEncodingException catch that could
        // silently return null.
        return new ByteArrayInputStream(xmlAsWriter.toString().getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Convert a w3c dom node to an InputStream.
     *
     * @param node the node to serialize (XML declaration omitted).
     * @return an {@link java.io.InputStream} over the serialized node.
     * @throws IllegalStateException if the transformer cannot be created or
     *         the transformation fails; the original exception is preserved
     *         as the cause. (FIX: previously failures were only printed and the
     *         method either threw NullPointerException or returned an empty stream.)
     */
    public static InputStream nodeToInputStream(Node node) {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        Result outputTarget = new StreamResult(outputStream);
        try {
            Transformer t = TransformerFactory.newInstance().newTransformer();
            t.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
            t.transform(new DOMSource(node), outputTarget);
        } catch (TransformerException | TransformerFactoryConfigurationError e) {
            throw new IllegalStateException("Error whilst transforming the node to an InputStream.", e);
        }
        return new ByteArrayInputStream(outputStream.toByteArray());
    }

}
package coopy; /** * * For other languages, I implemented a simple wrapper around the most * obvious 2D representation of a table, to avoid unnecessary copies. * In Java, the 2D representations aren't really so convenient to * use that this is worth doing, if we are modifying the list. * So this wrapper switches representation once we start making * structural modifications. * */ public class JavaTableView extends haxe.lang.HxObject implements coopy.Table { private Object[][] basic_data; private coopy.SimpleTable edit_data; private int w; private int h; public JavaTableView() { basic_data = null; edit_data = new coopy.SimpleTable(1,1); w = h = 0; } public JavaTableView(Object[][] data) { basic_data = data; edit_data = null; h = basic_data.length; if (h>0) { w = basic_data[0].length; } else { w = 0; } } public int get_width() { if (edit_data!=null) return edit_data.get_width(); return this.w; } public int get_height() { if (edit_data!=null) return edit_data.get_height(); return this.h; } private void needToEdit() { if (edit_data==null) { edit_data = new coopy.SimpleTable(w,h); for (int y=0; y<h; y++) { for (int x=0; x<w; x++) { edit_data.setCell(x,y,basic_data[y][x]); } } w = 0; h = 0; } basic_data = null; } public Object[][] getData() { if (basic_data == null) { int ww = get_width(); int hh = get_height(); basic_data = new Object[hh][ww]; for (int y=0; y<h; y++) { for (int x=0; x<w; x++) { basic_data[y][x] = edit_data.getCell(x,y); } } } return basic_data; } public Object getCell(int x, int y) { if (edit_data!=null) return edit_data.getCell(x,y); return basic_data[y][x]; } public void setCell(int x, int y, java.lang.Object c) { if (edit_data!=null) { edit_data.setCell(x,y,c); return; } basic_data[y][x] = c; } public coopy.View getCellView() { return new coopy.SimpleView(); } public boolean isResizable() { return true; } public boolean resize(int w, int h) { needToEdit(); return edit_data.resize(w,h); } public void clear() { Object[][] data = {}; this.basic_data = 
data; this.edit_data = null; } public boolean insertOrDeleteRows(haxe.root.Array<java.lang.Object> fate, int hfate) { needToEdit(); return edit_data.insertOrDeleteRows(fate,hfate); } public boolean insertOrDeleteColumns(haxe.root.Array<java.lang.Object> fate, int wfate) { needToEdit(); return edit_data.insertOrDeleteColumns(fate,wfate); } public boolean trimBlank() { needToEdit(); return edit_data.trimBlank(); } @Override public java.lang.String toString() { return coopy.SimpleTable.tableToString(this); } @Override public JavaTableView clone() { JavaTableView result = new JavaTableView(); result.resize(w,h); for (int c=0; c<w; c++) { for (int r=0; r<h; r++) { result.setCell(c,r,getCell(c,r)); } } return result; } @Override public JavaTableView create() { return new JavaTableView(); } @Override public coopy.Meta getMeta() { return null; } /* * * The following methods shouldn't be needed since daff should * never need dynamic access to this class. However, there's * a bug in haxe related to setters/getters that I need to chase. * So there are here for now. 
* */ public coopy.Table getTable() { return this; } @Override public double __hx_setField_f(java.lang.String field, double value, boolean handleProperties) { throw null; } @Override public Object __hx_setField(java.lang.String field, java.lang.Object value, boolean handleProperties) { throw null; } @Override public java.lang.Object __hx_getField(java.lang.String field, boolean throwErrors, boolean isCheck, boolean handleProperties) { boolean onwards = true; switch (field.hashCode()) { case -75605984: if (field.equals("getData")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("getData"))) ); } break; case -510954926: if (field.equals("trimBlank")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("trimBlank"))) ); } break; case 3076010: if (field.equals("data")) { onwards = false; return this.getData(); } break; case 1889278614: if (field.equals("insertOrDeleteColumns")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("insertOrDeleteColumns"))) ); } break; case 1186308544: if (field.equals("insertOrDeleteRows")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("insertOrDeleteRows"))) ); } break; case 94746189: if (field.equals("clear")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("clear"))) ); } break; case 1965941272: if (field.equals("getTable")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("getTable"))) ); } break; case -934437708: if (field.equals("resize")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), 
haxe.lang.Runtime.toString("resize"))) ); } break; case -1221029593: if (field.equals("height")) { onwards = false; return this.get_height(); } break; case -972315487: if (field.equals("isResizable")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("isResizable"))) ); } break; case 113126854: if (field.equals("width")) { onwards = false; return this.get_width(); } break; case 1160377501: if (field.equals("getCellView")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("getCellView"))) ); } break; case -1776922004: if (field.equals("toString")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("toString"))) ); } break; case 1150076829: if (field.equals("get_width")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("get_width"))) ); } break; case 1984477412: if (field.equals("setCell")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("setCell"))) ); } break; case 859648560: if (field.equals("get_height")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("get_height"))) ); } break; case -75632168: if (field.equals("getCell")) { onwards = false; return ((haxe.lang.Function) (new haxe.lang.Closure(((java.lang.Object) (this) ), haxe.lang.Runtime.toString("getCell"))) ); } break; } // pending: add getMetaView here and in __hx_getField_f if (onwards) { return super.__hx_getField(field, throwErrors, isCheck, handleProperties); } throw null; } @Override public double __hx_getField_f(java.lang.String field, boolean throwErrors, boolean handleProperties) { boolean onwards = true; switch (field.hashCode()) { 
case -1221029593: if (field.equals("height")) { onwards = false; return (double)(this.get_height()); } break; case 113126854: if (field.equals("width")) { onwards = false; return (double)(this.get_width()); } break; } if (onwards) { return super.__hx_getField_f(field, throwErrors, handleProperties); } throw null; } @Override public java.lang.Object __hx_invokeField(java.lang.String field, java.lang.Object[] dynargs) { boolean onwards = true; switch (field.hashCode()) { case -75605984: if (field.equals("getData")) { onwards = false; return this.getData(); } break; case -510954926: if (field.equals("trimBlank")) { onwards = false; return this.trimBlank(); } break; case 1965941272: if (field.equals("getTable")) { onwards = false; return this.getTable(); } break; case 1889278614: if (field.equals("insertOrDeleteColumns")) { onwards = false; return this.insertOrDeleteColumns(((haxe.root.Array<java.lang.Object>) (dynargs[0]) ), ((int) (haxe.lang.Runtime.toInt(dynargs[1])) )); } break; case 1150076829: if (field.equals("get_width")) { onwards = false; return this.get_width(); } break; case 1186308544: if (field.equals("insertOrDeleteRows")) { onwards = false; return this.insertOrDeleteRows(((haxe.root.Array<java.lang.Object>) (dynargs[0]) ), ((int) (haxe.lang.Runtime.toInt(dynargs[1])) )); } break; case 859648560: if (field.equals("get_height")) { onwards = false; return this.get_height(); } break; case 94746189: if (field.equals("clear")) { onwards = false; this.clear(); } break; case -934437708: if (field.equals("resize")) { onwards = false; return this.resize(((int) (haxe.lang.Runtime.toInt(dynargs[0])) ), ((int) (haxe.lang.Runtime.toInt(dynargs[1])) )); } break; case -75632168: if (field.equals("getCell")) { onwards = false; return this.getCell(((int) (haxe.lang.Runtime.toInt(dynargs[0])) ), ((int) (haxe.lang.Runtime.toInt(dynargs[1])) )); } break; case -972315487: if (field.equals("isResizable")) { onwards = false; return this.isResizable(); } break; case 1984477412: 
if (field.equals("setCell")) { onwards = false; this.setCell(((int) (haxe.lang.Runtime.toInt(dynargs[0])) ), ((int) (haxe.lang.Runtime.toInt(dynargs[1])) ), dynargs[2]); } break; case 1160377501: if (field.equals("getCellView")) { onwards = false; return this.getCellView(); } break; case -1776922004: if (field.equals("toString")) { onwards = false; return this.toString(); } break; } if (onwards) { return super.__hx_invokeField(field, dynargs); } return null; } @Override public void __hx_getFields(haxe.root.Array<java.lang.String> baseArr) { baseArr.push("width"); baseArr.push("height"); baseArr.push("data"); super.__hx_getFields(baseArr); } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.messaging.eventhubs; import com.azure.core.amqp.implementation.MessageSerializer; import com.azure.core.amqp.models.AmqpAnnotatedMessage; import com.azure.core.exception.AzureException; import com.azure.core.util.Context; import com.azure.core.util.logging.ClientLogger; import com.azure.messaging.eventhubs.implementation.ManagementChannel; import com.azure.messaging.eventhubs.models.LastEnqueuedEventProperties; import org.apache.qpid.proton.amqp.Binary; import org.apache.qpid.proton.amqp.Symbol; import org.apache.qpid.proton.amqp.messaging.AmqpValue; import org.apache.qpid.proton.amqp.messaging.ApplicationProperties; import org.apache.qpid.proton.amqp.messaging.Data; import org.apache.qpid.proton.amqp.messaging.DeliveryAnnotations; import org.apache.qpid.proton.amqp.messaging.MessageAnnotations; import org.apache.qpid.proton.message.Message; import java.time.Instant; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import static com.azure.core.amqp.AmqpMessageConstant.ENQUEUED_TIME_UTC_ANNOTATION_NAME; import static com.azure.core.amqp.AmqpMessageConstant.OFFSET_ANNOTATION_NAME; import static com.azure.core.amqp.AmqpMessageConstant.PARTITION_KEY_ANNOTATION_NAME; import static com.azure.core.amqp.AmqpMessageConstant.SEQUENCE_NUMBER_ANNOTATION_NAME; import static com.azure.messaging.eventhubs.implementation.ManagementChannel.MANAGEMENT_RESULT_LAST_ENQUEUED_OFFSET; import static com.azure.messaging.eventhubs.implementation.ManagementChannel.MANAGEMENT_RESULT_LAST_ENQUEUED_SEQUENCE_NUMBER; import static com.azure.messaging.eventhubs.implementation.ManagementChannel.MANAGEMENT_RESULT_LAST_ENQUEUED_TIME_UTC; import static com.azure.messaging.eventhubs.implementation.ManagementChannel.MANAGEMENT_RESULT_RUNTIME_INFO_RETRIEVAL_TIME_UTC; /** * Utility class for 
converting {@link EventData} to {@link Message}. */ class EventHubMessageSerializer implements MessageSerializer { private final ClientLogger logger = new ClientLogger(EventHubMessageSerializer.class); private static final Symbol LAST_ENQUEUED_SEQUENCE_NUMBER = Symbol.getSymbol(MANAGEMENT_RESULT_LAST_ENQUEUED_SEQUENCE_NUMBER); private static final Symbol LAST_ENQUEUED_OFFSET = Symbol.getSymbol(MANAGEMENT_RESULT_LAST_ENQUEUED_OFFSET); private static final Symbol LAST_ENQUEUED_TIME_UTC = Symbol.getSymbol(MANAGEMENT_RESULT_LAST_ENQUEUED_TIME_UTC); private static final Symbol RETRIEVAL_TIME_UTC = Symbol.getSymbol(MANAGEMENT_RESULT_RUNTIME_INFO_RETRIEVAL_TIME_UTC); /** * Gets the serialized size of the AMQP message. */ @Override public int getSize(Message amqpMessage) { if (amqpMessage == null) { return 0; } int payloadSize = getPayloadSize(amqpMessage); // EventData - accepts only PartitionKey - which is a String & stuffed into MessageAnnotation final MessageAnnotations messageAnnotations = amqpMessage.getMessageAnnotations(); final ApplicationProperties applicationProperties = amqpMessage.getApplicationProperties(); int annotationsSize = 0; int applicationPropertiesSize = 0; if (messageAnnotations != null) { final Map<Symbol, Object> map = messageAnnotations.getValue(); for (Map.Entry<Symbol, Object> entry : map.entrySet()) { final int size = sizeof(entry.getKey()) + sizeof(entry.getValue()); annotationsSize += size; } } if (applicationProperties != null) { final Map<String, Object> map = applicationProperties.getValue(); for (Map.Entry<String, Object> entry : map.entrySet()) { final int size = sizeof(entry.getKey()) + sizeof(entry.getValue()); applicationPropertiesSize += size; } } return annotationsSize + applicationPropertiesSize + payloadSize; } /** * Creates the AMQP message represented by this {@code object}. Currently, only supports serializing {@link * EventData}. * * @param object Concrete object to deserialize. 
* * @return A new AMQP message for this {@code object}. * * @throws IllegalArgumentException if {@code object} is not an instance of {@link EventData}. */ @Override public <T> Message serialize(T object) { Objects.requireNonNull(object, "'object' to serialize cannot be null."); if (!(object instanceof EventData)) { throw logger.logExceptionAsError(new IllegalArgumentException( "Cannot serialize object that is not EventData. Class: " + object.getClass())); } final EventData eventData = (EventData) object; final AmqpAnnotatedMessage amqpAnnotatedMessage = eventData.getRawAmqpMessage(); final Message protonJ = MessageUtils.toProtonJMessage(amqpAnnotatedMessage); // Removing any system properties like ENQUEUED TIME, OFFSET, SEQUENCE NUMBER. // These values are populated in the case that the user received the event and is // resending the event. if (protonJ.getMessageAnnotations() != null && protonJ.getMessageAnnotations().getValue() != null) { EventData.RESERVED_SYSTEM_PROPERTIES.forEach(key -> protonJ.getMessageAnnotations().getValue().remove(Symbol.valueOf(key))); } return protonJ; } @SuppressWarnings("unchecked") @Override public <T> T deserialize(Message message, Class<T> clazz) { Objects.requireNonNull(message, "'message' cannot be null."); Objects.requireNonNull(clazz, "'clazz' cannot be null."); if (clazz == PartitionProperties.class || clazz == EventHubProperties.class) { return deserializeManagementResponse(message, clazz); } else if (clazz == EventData.class) { return (T) deserializeEventData(message); } else if (clazz == LastEnqueuedEventProperties.class) { return (T) deserializeEnqueuedEventProperties(message); } else { throw logger.logExceptionAsError(new IllegalArgumentException( "Deserialization only supports EventData, PartitionProperties, or EventHubProperties.")); } } @Override public <T> List<T> deserializeList(Message message, Class<T> clazz) { return Collections.singletonList(deserialize(message, clazz)); } @SuppressWarnings("unchecked") private 
<T> T deserializeManagementResponse(Message message, Class<T> deserializedType) { if (!(message.getBody() instanceof AmqpValue)) { throw logger.logExceptionAsError(new IllegalArgumentException( "Expected message.getBody() to be AmqpValue, but is: " + message.getBody())); } final AmqpValue body = (AmqpValue) message.getBody(); if (!(body.getValue() instanceof Map)) { throw logger.logExceptionAsError(new IllegalArgumentException( "Expected message.getBody().getValue() to be of type Map")); } final Map<?, ?> amqpBody = (Map<?, ?>) body.getValue(); if (deserializedType == PartitionProperties.class) { return (T) toPartitionProperties(amqpBody); } else if (deserializedType == EventHubProperties.class) { return (T) toEventHubProperties(amqpBody); } else { throw logger.logExceptionAsError(new IllegalArgumentException(String.format( Messages.CLASS_NOT_A_SUPPORTED_TYPE, deserializedType))); } } /** * Tries to deserialize {@link LastEnqueuedEventProperties} from an AMQP message. * * @param message AMQP message from the message broker. * * @return An instance of {@link LastEnqueuedEventProperties} with extracted properties. Otherwise, {@code null} if * there were no delivery annotations in the message. 
*/ private LastEnqueuedEventProperties deserializeEnqueuedEventProperties(Message message) { final DeliveryAnnotations annotations = message.getDeliveryAnnotations(); if (annotations == null || annotations.getValue() == null) { return null; } final Map<Symbol, Object> deliveryAnnotations = annotations.getValue(); final Long lastSequenceNumber = getValue(deliveryAnnotations, LAST_ENQUEUED_SEQUENCE_NUMBER, Long.class); final String lastEnqueuedOffset = getValue(deliveryAnnotations, LAST_ENQUEUED_OFFSET, String.class); final Instant lastEnqueuedTime = getValue(deliveryAnnotations, LAST_ENQUEUED_TIME_UTC, Date.class).toInstant(); final Instant retrievalTime = getValue(deliveryAnnotations, RETRIEVAL_TIME_UTC, Date.class).toInstant(); return new LastEnqueuedEventProperties(lastSequenceNumber, Long.valueOf(lastEnqueuedOffset), lastEnqueuedTime, retrievalTime); } private EventData deserializeEventData(Message message) { final AmqpAnnotatedMessage amqpAnnotatedMessage = MessageUtils.toAmqpAnnotatedMessage(message); // Convert system properties to their respective types. 
final Map<String, Object> messageAnnotations = amqpAnnotatedMessage.getMessageAnnotations(); if (!messageAnnotations.containsKey(OFFSET_ANNOTATION_NAME.getValue())) { throw logger.logExceptionAsError(new IllegalStateException(String.format(Locale.US, "offset: %s should always be in map.", OFFSET_ANNOTATION_NAME.getValue()))); } else if (!messageAnnotations.containsKey(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue())) { throw logger.logExceptionAsError(new IllegalStateException(String.format(Locale.US, "enqueuedTime: %s should always be in map.", ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()))); } else if (!messageAnnotations.containsKey(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue())) { throw logger.logExceptionAsError(new IllegalStateException(String.format(Locale.US, "enqueuedTime: %s should always be in map.", SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()))); } final Object enqueuedTimeObject = messageAnnotations.get(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue()); final Instant enqueuedTime; if (enqueuedTimeObject instanceof Date) { enqueuedTime = ((Date) enqueuedTimeObject).toInstant(); } else if (enqueuedTimeObject instanceof Instant) { enqueuedTime = (Instant) enqueuedTimeObject; } else { throw logger.logExceptionAsError(new IllegalStateException(new IllegalStateException( String.format(Locale.US, "enqueuedTime is not a known type. Value: %s. Type: %s", enqueuedTimeObject, enqueuedTimeObject.getClass())))); } final String partitionKey = (String) messageAnnotations.get(PARTITION_KEY_ANNOTATION_NAME.getValue()); final long offset = getAsLong(messageAnnotations, OFFSET_ANNOTATION_NAME.getValue()); final long sequenceNumber = getAsLong(messageAnnotations, SEQUENCE_NUMBER_ANNOTATION_NAME.getValue()); // Put the properly converted time back into the dictionary. 
messageAnnotations.put(OFFSET_ANNOTATION_NAME.getValue(), offset); messageAnnotations.put(ENQUEUED_TIME_UTC_ANNOTATION_NAME.getValue(), enqueuedTime); messageAnnotations.put(SEQUENCE_NUMBER_ANNOTATION_NAME.getValue(), sequenceNumber); final SystemProperties systemProperties = new SystemProperties(amqpAnnotatedMessage, offset, enqueuedTime, sequenceNumber, partitionKey); final EventData eventData = new EventData(amqpAnnotatedMessage, systemProperties, Context.NONE); message.clear(); return eventData; } private EventHubProperties toEventHubProperties(Map<?, ?> amqpBody) { return new EventHubProperties( getValue(amqpBody, ManagementChannel.MANAGEMENT_ENTITY_NAME_KEY, String.class), getDate(amqpBody, ManagementChannel.MANAGEMENT_RESULT_CREATED_AT), getValue(amqpBody, ManagementChannel.MANAGEMENT_RESULT_PARTITION_IDS, String[].class)); } private PartitionProperties toPartitionProperties(Map<?, ?> amqpBody) { return new PartitionProperties( getValue(amqpBody, ManagementChannel.MANAGEMENT_ENTITY_NAME_KEY, String.class), getValue(amqpBody, ManagementChannel.MANAGEMENT_PARTITION_NAME_KEY, String.class), getValue(amqpBody, ManagementChannel.MANAGEMENT_RESULT_BEGIN_SEQUENCE_NUMBER, Long.class), getValue(amqpBody, MANAGEMENT_RESULT_LAST_ENQUEUED_SEQUENCE_NUMBER, Long.class), getValue(amqpBody, ManagementChannel.MANAGEMENT_RESULT_LAST_ENQUEUED_OFFSET, String.class), getDate(amqpBody, ManagementChannel.MANAGEMENT_RESULT_LAST_ENQUEUED_TIME_UTC), getValue(amqpBody, ManagementChannel.MANAGEMENT_RESULT_PARTITION_IS_EMPTY, Boolean.class)); } /** * Gets the property value as a Long. * * @param amqpBody Map to get value from. * @param key The key to get value of. * * @return The corresponding long. * * @throws IllegalStateException if the property is not a long. 
*/ private long getAsLong(Map<String, Object> amqpBody, String key) { final Object object = amqpBody.get(key); final long value; if (object instanceof String) { try { value = Long.parseLong((String) object); } catch (NumberFormatException e) { throw logger.logExceptionAsError(new IllegalStateException("'" + key + "' could not be parsed into a Long. Value: " + object, e)); } } else if (object instanceof Long) { value = (Long) object; } else { throw logger.logExceptionAsError(new IllegalStateException(new IllegalStateException( String.format(Locale.US, "'" + key + "' value is not a known type. Value: %s. Type: %s", object, object.getClass())))); } return value; } private <T> T getValue(Map<?, ?> amqpBody, String key, Class<T> clazz) { if (!amqpBody.containsKey(key)) { throw logger.logExceptionAsError(new AzureException( String.format("AMQP body did not contain expected field '%s'.", key))); } return getValue(amqpBody.get(key), key, clazz); } private <T> T getValue(Map<Symbol, Object> amqpBody, Symbol key, Class<T> clazz) { if (!amqpBody.containsKey(key)) { throw logger.logExceptionAsError(new AzureException( String.format("AMQP body did not contain expected field '%s'.", key))); } return getValue(amqpBody.get(key), key, clazz); } @SuppressWarnings("unchecked") private <T> T getValue(Object value, Object key, Class<T> clazz) { if (value == null) { throw logger.logExceptionAsError(new AzureException( String.format("AMQP body did not contain a value for key '%s'.", key))); } else if (value.getClass() != clazz) { throw logger.logExceptionAsError(new AzureException(String.format( "AMQP body did not contain correct value for key '%s'. Expected class: '%s'. 
Actual: '%s'", key, clazz, value.getClass()))); } return (T) value; } private Instant getDate(Map<?, ?> amqpBody, String key) { final Date value = getValue(amqpBody, key, Date.class); return value.toInstant(); } private static int getPayloadSize(Message msg) { if (msg == null || msg.getBody() == null) { return 0; } if (msg.getBody() instanceof Data) { final Data payloadSection = (Data) msg.getBody(); if (payloadSection == null) { return 0; } final Binary payloadBytes = payloadSection.getValue(); if (payloadBytes == null) { return 0; } return payloadBytes.getLength(); } if (msg.getBody() instanceof AmqpValue) { final AmqpValue amqpValue = (AmqpValue) msg.getBody(); if (amqpValue == null) { return 0; } return amqpValue.getValue().toString().length() * 2; } return 0; } private static int sizeof(Object obj) { if (obj instanceof String) { return obj.toString().length() << 1; } if (obj instanceof Symbol) { return ((Symbol) obj).length() << 1; } if (obj instanceof Integer) { return Integer.BYTES; } if (obj instanceof Long) { return Long.BYTES; } if (obj instanceof Short) { return Short.BYTES; } if (obj instanceof Character) { return Character.BYTES; } if (obj instanceof Float) { return Float.BYTES; } if (obj instanceof Double) { return Double.BYTES; } if (obj instanceof Date) { return 32; } throw new IllegalArgumentException(String.format(Messages.ENCODING_TYPE_NOT_SUPPORTED, obj.getClass())); } }
package com.planet_ink.coffee_mud.Commands; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.Clan.Authority; import com.planet_ink.coffee_mud.Common.interfaces.Clan.Function; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2004-2016 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/

/**
 * CLANVOTE command: lets a player list, inspect, and cast ballots on pending
 * votes for a clan they belong to. With no vote number it lists the votes the
 * player is entitled to see; with a number it shows that vote's details and,
 * if still open and the player has not yet voted, prompts for YEA/NAY (or
 * CANCEL for the vote's starter). When every eligible voter has voted, the
 * vote is resolved and, if passed, its command text is executed by a factory
 * MOB acting on the clan's behalf.
 */
public class ClanVote extends StdCommand
{
	public ClanVote(){}

	// Command words that trigger this command.
	private final String[] access=I(new String[]{"CLANVOTE"});

	@Override
	public String[] getAccessWords(){return access;}

	@Override
	public boolean execute(MOB mob, List<String> commands, int metaFlags)
		throws java.io.IOException
	{
		StringBuffer msg=new StringBuffer("");
		// Last argument is the vote number if numeric; anything between the
		// command word and the number is treated as a clan-name filter.
		String voteNumStr=(commands.size()>1)?(String)commands.get(commands.size()-1):"";
		String clanName="";
		if(!CMath.isInteger(voteNumStr))
		{
			clanName=(commands.size()>2)?CMParms.combine(commands,1,commands.size()):"";
			voteNumStr="";
		}
		else
			clanName=(commands.size()>2)?CMParms.combine(commands,1,commands.size()-1):"";
		// Find the first of the mob's clans matching the filter (or the first
		// clan at all when no filter was given), along with the mob's role in it.
		Clan C=null;
		Integer clanRole=null;
		for(final Pair<Clan,Integer> c : mob.clans())
			if((clanName.length()==0)||(CMLib.english().containsString(c.first.getName(), clanName)))
			{
				C=c.first;
				clanRole=c.second;
				break;
			}
		if((C==null)||(clanRole==null))
		{
			mob.tell(L("You can't vote for anything in @x1.",((clanName.length()==0)?"any clan":clanName)));
			return false;
		}
		else
		if(!mob.isMonster())
		{
			// Collect only the votes this member's role is authorized to vote on:
			// ASSIGN votes need VOTE_ASSIGN authority, all others need VOTE_OTHER.
			final Vector<Clan.ClanVote> votesForYou=new Vector<Clan.ClanVote>();
			for(final Enumeration<Clan.ClanVote> e=C.votes();e.hasMoreElements();)
			{
				final Clan.ClanVote CV=e.nextElement();
				if(((CV.function==Clan.Function.ASSIGN.ordinal())
					&&(C.getAuthority(clanRole.intValue(),Clan.Function.VOTE_ASSIGN)!=Clan.Authority.CAN_NOT_DO))
				||((CV.function!=Clan.Function.ASSIGN.ordinal())
					&&(C.getAuthority(clanRole.intValue(),Clan.Function.VOTE_OTHER)!=Clan.Authority.CAN_NOT_DO)))
					votesForYou.add(CV);
			}
			if(voteNumStr.length()==0)
			{
				// No vote number: list all visible votes, flagging with '*' the
				// ones this player has already voted on.
				if(votesForYou.size()==0)
					msg.append(L("Your @x1 does not have anything up for your vote.",C.getGovernmentName()));
				else
				{
					msg.append(L(" @x1@x2Command to execute\n\r",CMStrings.padRight("#",3),CMStrings.padRight(L("Status"),15)));
					for(int v=0;v<votesForYou.size();v++)
					{
						final Clan.ClanVote CV=votesForYou.get(v);
						final boolean ivoted=((CV.votes!=null)&&(CV.votes.containsFirst(mob.Name())));
						final int votesCast=(CV.votes!=null)?CV.votes.size():0;
						msg.append((ivoted?"*":" ")
								+CMStrings.padRight(""+(v+1),3)
								+CMStrings.padRight(((CV.voteStatus==Clan.VSTAT_STARTED)?(votesCast+" votes cast"):(Clan.VSTAT_DESCS[CV.voteStatus])),15)
								+CMStrings.padRight(CV.matter,55)+"\n\r");
					}
					msg.append(L("\n\rEnter CLANVOTE [#] to see details or place your vote."));
				}
			}
			else
			{
				// A vote number was given: show details for that vote.
				final int which=CMath.s_int(voteNumStr)-1;
				Clan.ClanVote CV=null;
				if((which>=0)&&(which<votesForYou.size()))
					CV=votesForYou.get(which);
				if(CV==null)
					msg.append(L("That vote does not exist.  Use CLANVOTE to see a list."));
				else
				{
					// Tally existing ballots and find this player's prior ballot, if any.
					int yeas=0;
					int nays=0;
					Boolean myVote=null;
					if(CV.votes!=null)
					for(int vs=0;vs<CV.votes.size();vs++)
					{
						if(CV.votes.getFirst(vs).equals(mob.Name()))
							myVote=CV.votes.getSecond(vs);
						if(CV.votes.getSecond(vs).booleanValue())
							yeas++;
						else
							nays++;
					}
					msg.append(L("Vote : @x1\n\r",""+(which+1)));
					msg.append(L("Started by : @x1\n\r",CV.voteStarter));
					// NOTE(review): the "Ended on" branch also formats CV.voteStarted —
					// there appears to be no separate end-date field; confirm intent.
					if(CV.voteStatus==Clan.VSTAT_STARTED)
						msg.append(L("Started on : @x1\n\r",CMLib.time().date2String(CV.voteStarted)));
					else
						msg.append(L("Ended on : @x1\n\r",CMLib.time().date2String(CV.voteStarted)));
					msg.append(L("Status : @x1\n\r",Clan.VSTAT_DESCS[CV.voteStatus]));
					switch(CV.voteStatus)
					{
					case Clan.VSTAT_STARTED:
						msg.append(L("If passed, the following command would be executed:\n\r"));
						break;
					case Clan.VSTAT_PASSED:
						msg.append(L("Results : @x1 Yeas, @x2 Nays\n\r",""+yeas,""+nays));
						msg.append(L("The following command has been executed:\n\r"));
						break;
					case Clan.VSTAT_FAILED:
						msg.append(L("Results : @x1 Yeas, @x2 Nays\n\r",""+yeas,""+nays));
						msg.append(L("The following command will not be executed:\n\r"));
						break;
					}
					msg.append(CV.matter+"\n\r");
					// Only prompt for a ballot if the vote is still open and the
					// player has not already voted (revoting is not allowed).
					if((CV.voteStatus==Clan.VSTAT_STARTED)&&(myVote==null))
					{
						mob.tell(msg.toString());
						msg=new StringBuffer("");
						final StringBuffer prompt=new StringBuffer("");
						String choices="";
						if(CV.votes==null)
							CV.votes=new PairVector<String,Boolean>();
						prompt.append("Y)EA N)AY ");
						choices="YN";
						// The vote's starter may also cancel the whole vote.
						if(CV.voteStarter.equals(mob.Name()))
						{
							prompt.append("C)ANCEL ");
							choices+="C";
						}
						final String enterWhat="to skip";
						//if(myVote!=null) enterWhat=("to keep ("+(myVote.booleanValue()?"Y":"N")+") "); // no revote
						boolean updateVote=false;
						if((prompt.length()>0)&&(mob.session()!=null))
						{
							final String answer=mob.session().choose(L("Choices: @x1or ENTER @x2: ",prompt.toString(),enterWhat),choices,"");
							if(answer.length()>0)
							switch(answer.toUpperCase().charAt(0))
							{
							case 'Y':
								msg.append(L("Your YEA vote is recorded."));
								CV.votes.add(mob.Name(),Boolean.TRUE);
								updateVote=true;
								yeas++;
								break;
							case 'N':
								CV.votes.add(mob.Name(),Boolean.FALSE);
								msg.append(L("Your NAY vote is recorded."));
								updateVote=true;
								nays++;
								break;
							case 'C':
								if((mob.session()!=null)
								&&(mob.session().confirm(L("This will cancel this entire vote, are you sure (N/y)?"),"N")))
								{
									C.delVote(CV);
									CMLib.clans().clanAnnounce(mob,L("A prior vote for @x1 @x2 has been deleted.",C.getGovernmentName(),C.clanID()));
									msg.append(L("The vote has been deleted."));
									updateVote=true;
								}
								break;
							}
						}
						// If every eligible voter has now cast a ballot, resolve the vote.
						// Ties (yeas<=nays) fail; otherwise the vote passes and its command
						// text is executed by a temporary factory MOB named for the clan.
						final int numVotes=C.getNumVoters(Function.values()[CV.function]);
						if(numVotes<=(yeas+nays))
						{
							updateVote=true;
							if(yeas<=nays)
								CV.voteStatus=Clan.VSTAT_FAILED;
							else
							{
								CV.voteStatus=Clan.VSTAT_PASSED;
								final MOB mob2=CMClass.getFactoryMOB();
								mob2.setName(C.clanID());
								mob2.setClan(C.clanID(),C.getTopRankedRoles(Function.ASSIGN).get(0).intValue());
								mob2.basePhyStats().setLevel(1000);
								// Give the factory MOB a room to act from, falling back to
								// its start room and then any random room.
								if(mob2.location()==null)
								{
									mob2.setLocation(mob2.getStartRoom());
									if(mob2.location()==null)
										mob2.setLocation(CMLib.map().getRandomRoom());
								}
								final Vector<String> V=CMParms.parse(CV.matter);
								mob2.doCommand(V,metaFlags|MUDCmdProcessor.METAFLAG_FORCED);
								mob2.destroy();
							}
						}
						// Persist any ballot, cancellation, or resolution.
						if(updateVote)
							C.updateVotes();
					}
				}
			}
		}
		mob.tell(msg.toString());
		return false;
	}

	@Override
	public boolean canBeOrdered(){return false;}
}
/* * Copyright (c) JForum Team * All rights reserved. * * Redistribution and use in source and binary forms, * with or without modification, are permitted provided * that the following conditions are met: * * 1) Redistributions of source code must retain the above * copyright notice, this list of conditions and the * following disclaimer. * 2) Redistributions in binary form must reproduce the * above copyright notice, this list of conditions and * the following disclaimer in the documentation and/or * other materials provided with the distribution. * 3) Neither the name of "Rafael Steil" nor * the names of its contributors may be used to endorse * or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT * HOLDERS AND CONTRIBUTORS "AS IS" AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL
 * THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
 * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
 * IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
 *
 * Created on 20/05/2004 - 15:51:10
 * The JForum Project
 * http://www.jforum.net
 */
package net.jforum.dao.generic;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import net.jforum.JForumExecutionContext;
import net.jforum.dao.DataAccessDriver;
import net.jforum.dao.UserDAO;
import net.jforum.entities.Post;
import net.jforum.entities.PrivateMessage;
import net.jforum.entities.PrivateMessageType;
import net.jforum.entities.User;
import net.jforum.exceptions.DatabaseException;
import net.jforum.util.DbUtils;
import net.jforum.util.preferences.SystemGlobals;

/**
 * @author Rafael Steil
 * @version $Id$
 */
public class GenericPrivateMessageDAO extends AutoKeys implements net.jforum.dao.PrivateMessageDAO
{
	/**
	 * @see net.jforum.dao.PrivateMessageDAO#send(net.jforum.entities.PrivateMessage)
	 */
	public void send(PrivateMessage pm)
	{
		// We should store 2 copies: one for the sendee's sent box
		// and another for the target user's inbox.
		PreparedStatement pstmt = null;

		try {
			pstmt = this.getStatementForAutoKeys("PrivateMessageModel.add");

			// Sendee's sent box: addPm binds all parameters, executes, and sets pm's id.
			this.addPm(pm, pstmt);
			this.addPmText(pm);

			// Target user's inbox: reuse the statement's bound parameters, only
			// overwriting the message type before executing a second insert.
			pstmt.setInt(1, PrivateMessageType.NEW);

			pm.setId(this.executeAutoKeysQuery(pstmt));
			this.addPmText(pm);
		}
		catch (Exception e) {
			throw new DatabaseException(e);
		}
		finally {
			DbUtils.close(pstmt);
		}
	}

	/**
	 * Inserts the message body text for the given private message.
	 *
	 * @param pm the message whose id and post text are inserted
	 * @throws Exception if the insert fails
	 */
	protected void addPmText(PrivateMessage pm) throws Exception
	{
		PreparedStatement text = null;

		try {
			text = JForumExecutionContext.getConnection().prepareStatement(
				SystemGlobals.getSql("PrivateMessagesModel.addText"));
			text.setInt(1, pm.getId());
			text.setString(2, pm.getPost().getText());
			text.executeUpdate();
		}
		finally {
			DbUtils.close(text);
		}
	}

	/**
	 * Binds the header fields of a private message to the prepared statement
	 * (type SENT, subject, users, timestamp, formatting flags), executes it,
	 * and stores the generated id on the message.
	 *
	 * @param pm the message to insert
	 * @param pstmt the "PrivateMessageModel.add" statement to bind and execute
	 * @throws SQLException if binding or execution fails
	 */
	protected void addPm(PrivateMessage pm, PreparedStatement pstmt) throws SQLException
	{
		pstmt.setInt(1, PrivateMessageType.SENT);
		pstmt.setString(2, pm.getPost().getSubject());
		pstmt.setInt(3, pm.getFromUser().getId());
		pstmt.setInt(4, pm.getToUser().getId());
		pstmt.setTimestamp(5, new Timestamp(pm.getPost().getTime().getTime()));
		pstmt.setInt(6, pm.getPost().isBbCodeEnabled() ? 1 : 0);
		pstmt.setInt(7, pm.getPost().isHtmlEnabled() ? 1 : 0);
		pstmt.setInt(8, pm.getPost().isSmiliesEnabled() ? 1 : 0);
		pstmt.setInt(9, pm.getPost().isSignatureEnabled() ? 1 : 0);

		this.setAutoGeneratedKeysQuery(SystemGlobals.getSql("PrivateMessagesModel.lastGeneratedPmId"));
		pm.setId(this.executeAutoKeysQuery(pstmt));
	}

	/**
	 * @see net.jforum.dao.PrivateMessageDAO#delete(net.jforum.entities.PrivateMessage[], int)
	 */
	public void delete(PrivateMessage[] pm, int userId)
	{
		PreparedStatement deleteMessage = null;
		PreparedStatement deleteText = null;
		PreparedStatement isDeleteAllowed = null;

		try {
			Connection connection = JForumExecutionContext.getConnection();
			deleteMessage = connection.prepareStatement(SystemGlobals.getSql("PrivateMessageModel.delete"));
			deleteText = connection.prepareStatement(SystemGlobals.getSql("PrivateMessagesModel.deleteText"));
			isDeleteAllowed = connection.prepareStatement(SystemGlobals.getSql("PrivateMessagesModel.isDeleteAllowed"));

			// The user id is checked in two positions of the permission query;
			// only the message id (parameter 1) changes per iteration.
			isDeleteAllowed.setInt(2, userId);
			isDeleteAllowed.setInt(3, userId);

			for (int i = 0; i < pm.length; i++) {
				PrivateMessage currentMessage = pm[i];
				isDeleteAllowed.setInt(1, currentMessage.getId());

				ResultSet rs = null;
				try {
					rs = isDeleteAllowed.executeQuery();

					// Only delete text and header when the user is allowed to.
					if (rs.next()) {
						deleteText.setInt(1, currentMessage.getId());
						deleteText.executeUpdate();

						deleteMessage.setInt(1, currentMessage.getId());
						deleteMessage.executeUpdate();
					}
				}
				finally {
					DbUtils.close(rs);
				}
			}
		}
		catch (SQLException e) {
			throw new DatabaseException(e);
		}
		finally {
			DbUtils.close(deleteMessage);
			DbUtils.close(deleteText);
			DbUtils.close(isDeleteAllowed);
		}
	}

	/**
	 * @see net.jforum.dao.PrivateMessageDAO#selectFromInbox(net.jforum.entities.User)
	 */
	public List<PrivateMessage> selectFromInbox(User user)
	{
		String query = SystemGlobals.getSql("PrivateMessageModel.baseListing");
		// BUG FIX: use literal replace() instead of replaceAll(). replaceAll()
		// treats the configured SQL fragment as a regex replacement string, so
		// any '$' or '\' in it would corrupt the query or throw.
		query = query.replace("#FILTER#", SystemGlobals.getSql("PrivateMessageModel.inbox"));

		PreparedStatement pstmt = null;
		ResultSet rs = null;
		try {
			pstmt = JForumExecutionContext.getConnection().prepareStatement(query);
			pstmt.setInt(1, user.getId());

			List<PrivateMessage> pmList = new ArrayList<PrivateMessage>();

			rs = pstmt.executeQuery();
			while (rs.next()) {
				// Listing rows carry only the sender's id/username, not full user records.
				PrivateMessage pm = this.getPm(rs, false);

				User fromUser = new User();
				fromUser.setId(rs.getInt("user_id"));
				fromUser.setUsername(rs.getString("username"));

				pm.setFromUser(fromUser);

				pmList.add(pm);
			}

			return pmList;
		}
		catch (SQLException e) {
			throw new DatabaseException(e);
		}
		finally {
			DbUtils.close(rs, pstmt);
		}
	}

	/**
	 * @see net.jforum.dao.PrivateMessageDAO#selectFromSent(net.jforum.entities.User)
	 */
	public List<PrivateMessage> selectFromSent(User user)
	{
		String query = SystemGlobals.getSql("PrivateMessageModel.baseListing");
		// BUG FIX: literal replace() — see selectFromInbox for rationale.
		query = query.replace("#FILTER#", SystemGlobals.getSql("PrivateMessageModel.sent"));

		PreparedStatement pstmt = null;
		ResultSet rs = null;
		try {
			pstmt = JForumExecutionContext.getConnection().prepareStatement(query);
			pstmt.setInt(1, user.getId());

			List<PrivateMessage> pmList = new ArrayList<PrivateMessage>();

			rs = pstmt.executeQuery();
			while (rs.next()) {
				// Listing rows carry only the recipient's id/username.
				PrivateMessage pm = this.getPm(rs, false);

				User toUser = new User();
				toUser.setId(rs.getInt("user_id"));
				toUser.setUsername(rs.getString("username"));

				pm.setToUser(toUser);

				pmList.add(pm);
			}

			return pmList;
		}
		catch (SQLException e) {
			throw new DatabaseException(e);
		}
		finally {
			DbUtils.close(rs, pstmt);
		}
	}

	/**
	 * Builds a fully-populated private message from the current result-set row.
	 */
	protected PrivateMessage getPm(ResultSet rs) throws SQLException
	{
		return this.getPm(rs, true);
	}

	/**
	 * Builds a private message from the current result-set row.
	 *
	 * @param rs the result set positioned on a message row
	 * @param full when {@code true}, also loads both full user records, the
	 *     formatting flags, and the message text; listing queries pass
	 *     {@code false} because those columns are not selected there
	 * @throws SQLException if a column read fails
	 */
	protected PrivateMessage getPm(ResultSet rs, boolean full) throws SQLException
	{
		PrivateMessage pm = new PrivateMessage();
		Post post = new Post();

		pm.setId(rs.getInt("privmsgs_id"));
		pm.setType(rs.getInt("privmsgs_type"));
		post.setTime(new Date(rs.getTimestamp("privmsgs_date").getTime()));
		post.setSubject(rs.getString("privmsgs_subject"));
		pm.setPostDate(post.getTime());

		if (full) {
			UserDAO um = DataAccessDriver.getInstance().newUserDAO();
			pm.setFromUser(um.selectById(rs.getInt("privmsgs_from_userid")));
			pm.setToUser(um.selectById(rs.getInt("privmsgs_to_userid")));

			post.setBbCodeEnabled(rs.getInt("privmsgs_enable_bbcode") == 1);
			post.setSignatureEnabled(rs.getInt("privmsgs_attach_sig") == 1);
			post.setHtmlEnabled(rs.getInt("privmsgs_enable_html") == 1);
			post.setSmiliesEnabled(rs.getInt("privmsgs_enable_smilies") == 1);
			post.setText(this.getPmText(rs));
		}

		pm.setPost(post);

		return pm;
	}

	/**
	 * Reads the message body text column; split out so subclasses can override
	 * for drivers that store the text differently.
	 */
	protected String getPmText(ResultSet rs) throws SQLException
	{
		return rs.getString("privmsgs_text");
	}

	/**
	 * @see net.jforum.dao.PrivateMessageDAO#selectById(net.jforum.entities.PrivateMessage)
	 */
	public PrivateMessage selectById(PrivateMessage origPrivMsg)
	{
		PrivateMessage pm = origPrivMsg;
		PreparedStatement pstmt = null;
		ResultSet rs = null;
		try {
			pstmt = JForumExecutionContext.getConnection().prepareStatement(
				SystemGlobals.getSql("PrivateMessageModel.selectById"));
			pstmt.setInt(1, pm.getId());

			rs = pstmt.executeQuery();

			// If no row matches, the original (argument) message is returned unchanged.
			if (rs.next()) {
				pm = this.getPm(rs);
			}

			return pm;
		}
		catch (SQLException e) {
			throw new DatabaseException(e);
		}
		finally {
			DbUtils.close(rs, pstmt);
		}
	}

	/**
	 * @see net.jforum.dao.PrivateMessageDAO#updateType(net.jforum.entities.PrivateMessage)
	 */
	public void updateType(PrivateMessage pm)
	{
		PreparedStatement pstmt = null;
		try {
			pstmt = JForumExecutionContext.getConnection().prepareStatement(
				SystemGlobals.getSql("PrivateMessageModel.updateType"));
			pstmt.setInt(1, pm.getType());
			pstmt.setInt(2, pm.getId());
			pstmt.executeUpdate();
		}
		catch (SQLException e) {
			throw new DatabaseException(e);
		}
		finally {
			DbUtils.close(pstmt);
		}
	}
}
package nl.kaninefatendreef.si.server.model.jpa; import javax.persistence.CascadeType; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.OneToOne; import javax.persistence.Table; @Entity @Table(name = "SIMPLER_INVOICE_DOCUMENT") public class SimplerInvoiceDocument implements nl.kaninefatendreef.si.server.model.SimplerInvoiceDocument{ @Id @GeneratedValue (strategy = GenerationType.IDENTITY) @Column(name = "SIMPLER_INVOICE_DOCUMENT_ID") private Long id; @Column(name = "FILENAME") String fileName; @Column(name = "MESSAGE_ID") String messageId; @Column(name = "CHANNEL_ID") String channelId; @Column(name = "RECIPIENT_ID") String recipientId; @Column(name = "SENDER_ID") String senderId; @Column(name = "DOCUMENT_ID") String documentId; @Column(name = "PROCESS_ID") String processId; @Column(name = "REMOTE_HOST") String remoteHost; @Column(name = "ACCESS_POINT_PRINCIPLE") String accessPointPrinciple; // Extra meta information needed for processing the documents @Column(name = "PROCESSED") boolean processed; @Column(name = "CREATE_TIME_MS") Long createTimeInMs; @Column(name = "UPDATE_TIME_MS") Long updateTimeInMs; @Column(name = "PROCESS_STATUS") String processStatus; @Column(name = "PROCESS_STATUS_MS") Long processStatusTimeInMs; @Column(name = "PROCESS_RETRY") Integer procesRetry; @Column(name = "PROPERTY_1_NAME") String property1Name; @Column(name = "PROPERTY_1_VALUE") String property1Value; @Column(name = "PROPERTY_2_NAME") String property2Name; @Column(name = "PROPERTY_2_VALUE") String property2Value; @Column(name = "PROPERTY_3_NAME") String property3Name; @Column(name = "PROPERTY_3_VALUE") String property3Value; @Column(name = "PROPERTY_4_NAME") String property4Name; @Column(name = "PROPERTY_4_VALUE") String property4Value; @Column(name = "PROPERTY_5_NAME") String property5Name; @Column(name = "PROPERTY_5_VALUE") String 
property5Value; @OneToOne(cascade=CascadeType.ALL) SimplerInvoiceDocumentContent content; public SimplerInvoiceDocumentContent getContent() { return content; } public void setContent(nl.kaninefatendreef.si.server.model.SimplerInvoiceDocumentContent content) { this.content = (SimplerInvoiceDocumentContent)content; } public String getFileName() { return fileName; } public String getId() { return "" + id; } public void setId(String id) { this.id = new Long(id); } public void setFileName(String fileName) { this.fileName = fileName; } public String getMessageId() { return messageId; } public void setMessageId(String messageId) { this.messageId = messageId; } public String getChannelId() { return channelId; } public void setChannelId(String channelId) { this.channelId = channelId; } public String getRecipientId() { return recipientId; } public void setRecipientId(String recipientId) { this.recipientId = recipientId; } public String getSenderId() { return senderId; } public void setSenderId(String senderId) { this.senderId = senderId; } public String getDocumentId() { return documentId; } public void setDocumentId(String documentId) { this.documentId = documentId; } public String getProcessId() { return processId; } public void setProcessId(String processId) { this.processId = processId; } public String getRemoteHost() { return remoteHost; } public void setRemoteHost(String remoteHost) { this.remoteHost = remoteHost; } public String getAccessPointPrinciple() { return accessPointPrinciple; } public void setAccessPointPrinciple(String accessPointPrinciple) { this.accessPointPrinciple = accessPointPrinciple; } public Boolean getProcessed() { return processed; } public void setProcessed(Boolean processed) { this.processed = processed; } public Long getCreateTimeInMs() { return createTimeInMs; } public void setCreateTimeInMs(Long createTimeInMs) { this.createTimeInMs = createTimeInMs; } public Long getUpdateTimeInMs() { return updateTimeInMs; } public void 
setUpdateTimeInMs(Long updateTimeInMs) { this.updateTimeInMs = updateTimeInMs; } @Override public void setProcessStatus(String processStatus) { this.processStatus = processStatus; } @Override public String getProcessStatus() { return processStatus; } @Override public void setProcessStatusTimeInMs(Long processStatusTimeInMs) { this.processStatusTimeInMs = processStatusTimeInMs; } @Override public Long getProcessStatusTimeInMs() { return processStatusTimeInMs; } public String getProperty1Name() { return property1Name; } public void setProperty1Name(String property1Name) { this.property1Name = property1Name; } public String getProperty1Value() { return property1Value; } public void setProperty1Value(String property1Value) { this.property1Value = property1Value; } public String getProperty2Name() { return property2Name; } public void setProperty2Name(String property2Name) { this.property2Name = property2Name; } public String getProperty2Value() { return property2Value; } public void setProperty2Value(String property2Value) { this.property2Value = property2Value; } public String getProperty3Name() { return property3Name; } public void setProperty3Name(String property3Name) { this.property3Name = property3Name; } public String getProperty3Value() { return property3Value; } public void setProperty3Value(String property3Value) { this.property3Value = property3Value; } public String getProperty4Name() { return property4Name; } public void setProperty4Name(String property4Name) { this.property4Name = property4Name; } public String getProperty4Value() { return property4Value; } public void setProperty4Value(String property4Value) { this.property4Value = property4Value; } public String getProperty5Name() { return property5Name; } public void setProperty5Name(String property5Name) { this.property5Name = property5Name; } public String getProperty5Value() { return property5Value; } public void setProperty5Value(String property5Value) { this.property5Value = property5Value; } 
public Integer getProcesRetry() { return procesRetry; } public void setProcesRetry(Integer procesRetry) { this.procesRetry = procesRetry; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.cache.client.internal;

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;

import org.apache.logging.log4j.Logger;

import org.apache.geode.CancelCriterion;
import org.apache.geode.CancelException;
import org.apache.geode.GemFireConfigException;
import org.apache.geode.annotations.internal.MutableForTesting;
import org.apache.geode.cache.GatewayConfigurationException;
import org.apache.geode.cache.client.ServerRefusedConnectionException;
import org.apache.geode.cache.client.internal.ServerDenyList.FailureTracker;
import org.apache.geode.cache.wan.GatewaySender;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.distributed.internal.ServerLocation;
import org.apache.geode.internal.cache.tier.sockets.CacheClientUpdater;
import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID;
import org.apache.geode.internal.net.SocketCreatorFactory;
import org.apache.geode.internal.security.SecurableCommunicationChannel;
import org.apache.geode.logging.internal.log4j.api.LogService;
import org.apache.geode.security.GemFireSecurityException;

/**
 * Creates connections, using a connection source to determine which server to connect to.
 *
 * @since GemFire 5.7
 *
 */
public class ConnectionFactoryImpl implements ConnectionFactory {
  private static final Logger logger = LogService.getLogger();

  // TODO - GEODE-1746, the handshake holds state. It seems like the code depends
  // on all of the handshake operations happening in a single thread. I don't think we
  // want that, need to refactor.

  // Servers that recently failed; consulted to avoid immediately retrying them.
  private final ServerDenyList denyList;
  // Strategy object that picks which server to connect to.
  private ConnectionSource source;
  private PoolImpl pool;
  private final CancelCriterion cancelCriterion;
  // Performs the actual socket-level connect/handshake work.
  private final ConnectionConnector connectionConnector;

  /**
   * Test hook for client version support
   *
   * @since GemFire 5.7
   */
  @MutableForTesting
  public static boolean testFailedConnectionToServer = false;

  // Convenience constructor: builds the ConnectionConnector from raw settings.
  // Gateway connections (usedByGateway or a sender present) use the GATEWAY
  // socket creator; ordinary client connections use the SERVER one.
  ConnectionFactoryImpl(ConnectionSource source, EndpointManager endpointManager,
      InternalDistributedSystem sys, int socketBufferSize, int handshakeTimeout, int readTimeout,
      ClientProxyMembershipID proxyId, CancelCriterion cancelCriterion, boolean usedByGateway,
      GatewaySender sender, long pingInterval, boolean multiuserSecureMode, PoolImpl pool,
      final DistributionConfig distributionConfig) {
    this(
        new ConnectionConnector(endpointManager, sys, socketBufferSize, handshakeTimeout,
            readTimeout, usedByGateway, sender,
            (usedByGateway || sender != null)
                ? SocketCreatorFactory.getSocketCreatorForComponent(distributionConfig,
                    SecurableCommunicationChannel.GATEWAY)
                : SocketCreatorFactory.getSocketCreatorForComponent(distributionConfig,
                    SecurableCommunicationChannel.SERVER),
            new ClientSideHandshakeImpl(proxyId, sys, sys.getSecurityService(),
                multiuserSecureMode)),
        source, pingInterval, pool, cancelCriterion);
  }

  public ConnectionFactoryImpl(ConnectionConnector connectionConnector, ConnectionSource source,
      long pingInterval, PoolImpl pool, CancelCriterion cancelCriterion) {
    this.connectionConnector = connectionConnector;
    this.source = source;
    this.pool = pool;
    this.cancelCriterion = cancelCriterion;
    denyList = new ServerDenyList(pingInterval);
  }

  // Starts the deny list's background expiry task on the given executor.
  public void start(ScheduledExecutorService background) {
    denyList.start(background);
  }

  @Override
  public ServerDenyList getDenyList() {
    return denyList;
  }

  /**
   * Connects to a specific server. Fatal config/security/cancel errors and
   * explicit server refusals propagate; other failures are logged and yield a
   * null return (caller is expected to try another server).
   */
  @Override
  public Connection createClientToServerConnection(ServerLocation location, boolean forQueue)
      throws GemFireSecurityException {
    FailureTracker failureTracker = denyList.getFailureTracker(location);

    Connection connection = null;
    try {
      connection = connectionConnector.connectClientToServer(location, forQueue);
      // Success clears this server's recent-failure record.
      failureTracker.reset();
      authenticateIfRequired(connection);
    } catch (GemFireConfigException | CancelException | GemFireSecurityException
        | GatewayConfigurationException e) {
      throw e;
    } catch (ServerRefusedConnectionException src) {
      // propagate this up, don't retry
      logger.warn("Could not create a new connection to server: {}", src.getMessage());
      testFailedConnectionToServer = true;
      throw src;
    } catch (Exception e) {
      if (e.getMessage() != null && (e.getMessage().equals("Connection refused")
          || e.getMessage().equals("Connection reset"))) {
        // this is the most common case, so don't print an exception
        if (logger.isDebugEnabled()) {
          logger.debug("Unable to connect to {}: connection refused", location);
        }
      } else {
        logger.warn("Could not connect to: " + location, e);
      }
      testFailedConnectionToServer = true;
    }
    return connection;
  }

  // Runs single-user authentication on a fresh connection when the pool is not
  // a gateway and not in multi-user mode, and only if the server requires
  // credentials and has not yet assigned a user id (-1 sentinel).
  private void authenticateIfRequired(Connection conn) {
    cancelCriterion.checkCancelInProgress(null);
    if (!pool.isUsedByGateway() && !pool.getMultiuserAuthentication()) {
      ServerLocation server = conn.getServer();
      if (server.getRequiresCredentials()) {
        if (server.getUserId() == -1) {
          Long uniqueID = (Long) AuthenticateUserOp.executeOn(conn, pool);
          server.setUserId(uniqueID);
          if (logger.isDebugEnabled()) {
            logger.debug("CFI.authenticateIfRequired() Completed authentication on {}", conn);
          }
        }
      }
    }
  }

  /**
   * Picks a replacement server, excluding the given servers plus anything on
   * the deny list; if that yields nothing, retries with only the caller's
   * exclusions. With a balanced source the current server is kept as-is.
   */
  @Override
  public ServerLocation findBestServer(ServerLocation currentServer,
      Set<ServerLocation> excludedServers) {
    if (currentServer != null && source.isBalanced()) {
      return currentServer;
    }
    final Set<ServerLocation> origExcludedServers = excludedServers;
    excludedServers = new HashSet<>(excludedServers);
    Set<ServerLocation> denyListedServers = denyList.getBadServers();
    excludedServers.addAll(denyListedServers);
    ServerLocation server = source.findReplacementServer(currentServer, excludedServers);
    if (server == null) {
      // Nothing worked! Let's try without the denylist.
      if (excludedServers.size() > origExcludedServers.size()) {
        // We had some servers denylisted so lets give this another whirl.
        server = source.findReplacementServer(currentServer, origExcludedServers);
      }
    }
    if (server == null && logger.isDebugEnabled()) {
      logger.debug("Source was unable to findForReplacement any servers");
    }
    return server;
  }

  /**
   * Keeps trying servers (skipping exclusions and deny-listed ones, then
   * relaxing the deny list once) until a connection succeeds or no candidates
   * remain. A remembered ServerRefusedConnectionException is rethrown if all
   * candidates are exhausted; otherwise null is returned on exhaustion.
   */
  @Override
  public Connection createClientToServerConnection(Set<ServerLocation> excludedServers)
      throws GemFireSecurityException {
    final Set<ServerLocation> origExcludedServers = excludedServers;
    excludedServers = new HashSet<>(excludedServers);
    Set<ServerLocation> denyListedServers = denyList.getBadServers();
    excludedServers.addAll(denyListedServers);
    Connection conn = null;
    RuntimeException fatalException = null;
    boolean tryDenyList = true;

    do {
      ServerLocation server = source.findServer(excludedServers);
      if (server == null) {

        if (tryDenyList) {
          // Nothing worked! Let's try without the denylist.
          tryDenyList = false;
          int size = excludedServers.size();
          excludedServers.removeAll(denyListedServers);
          // make sure we didn't remove any of the ones that the caller set not to use
          excludedServers.addAll(origExcludedServers);

          if (excludedServers.size() < size) {
            // We are able to remove some exclusions, so lets give this another whirl.
            continue;
          }
        }
        if (logger.isDebugEnabled()) {
          logger.debug("Source was unable to locate any servers");
        }
        if (fatalException != null) {
          throw fatalException;
        }
        return null;
      }

      try {
        conn = createClientToServerConnection(server, false);
      } catch (CancelException | GemFireSecurityException | GatewayConfigurationException e) {
        throw e;
      } catch (ServerRefusedConnectionException srce) {
        fatalException = srce;
        if (logger.isDebugEnabled()) {
          logger.debug("ServerRefusedConnectionException attempting to connect to {}", server,
              srce);
        }
      } catch (Exception e) {
        logger.warn(String.format("Could not connect to: %s", server), e);
      }

      // Never retry a server that already failed in this loop.
      excludedServers.add(server);
    } while (conn == null);

    return conn;
  }

  /**
   * Creates the server-to-client (subscription/updater) connection for the
   * given endpoint.
   */
  @Override
  public ClientUpdater createServerToClientConnection(Endpoint endpoint, QueueManager qManager,
      boolean isPrimary, ClientUpdater failedUpdater) {
    String clientUpdateName = CacheClientUpdater.CLIENT_UPDATER_THREAD_NAME + " on "
        + endpoint.getMemberId() + " port " + endpoint.getLocation().getPort();
    if (logger.isDebugEnabled()) {
      logger.debug("Establishing: {}", clientUpdateName);
    }
    return connectionConnector.connectServerToClient(endpoint, qManager, isPrimary, failedUpdater,
        clientUpdateName);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.query;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.guice.annotations.PublicApi;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.Numbers;

import java.util.concurrent.TimeUnit;

/**
 * Static helpers for reading well-known keys out of a {@link Query}'s context
 * map, with per-key defaults and (for timeout / scatter-gather limits)
 * enforcement against configured maxima.
 */
@PublicApi
public class QueryContexts
{
  public static final String PRIORITY_KEY = "priority";
  public static final String TIMEOUT_KEY = "timeout";
  public static final String MAX_SCATTER_GATHER_BYTES_KEY = "maxScatterGatherBytes";
  public static final String MAX_QUEUED_BYTES_KEY = "maxQueuedBytes";
  public static final String DEFAULT_TIMEOUT_KEY = "defaultTimeout";
  @Deprecated
  public static final String CHUNK_PERIOD_KEY = "chunkPeriod";

  public static final boolean DEFAULT_BY_SEGMENT = false;
  public static final boolean DEFAULT_POPULATE_CACHE = true;
  public static final boolean DEFAULT_USE_CACHE = true;
  public static final boolean DEFAULT_POPULATE_RESULTLEVEL_CACHE = true;
  public static final boolean DEFAULT_USE_RESULTLEVEL_CACHE = true;
  public static final int DEFAULT_PRIORITY = 0;
  public static final int DEFAULT_UNCOVERED_INTERVALS_LIMIT = 0;
  public static final long DEFAULT_TIMEOUT_MILLIS = TimeUnit.MINUTES.toMillis(5);
  public static final long NO_TIMEOUT = 0;

  /** True when the query asks for per-segment results. */
  public static <T> boolean isBySegment(Query<T> query)
  {
    return isBySegment(query, DEFAULT_BY_SEGMENT);
  }

  public static <T> boolean isBySegment(Query<T> query, boolean defaultValue)
  {
    return parseBoolean(query, "bySegment", defaultValue);
  }

  /** True when results may be written to the segment-level cache. */
  public static <T> boolean isPopulateCache(Query<T> query)
  {
    return isPopulateCache(query, DEFAULT_POPULATE_CACHE);
  }

  public static <T> boolean isPopulateCache(Query<T> query, boolean defaultValue)
  {
    return parseBoolean(query, "populateCache", defaultValue);
  }

  /** True when the segment-level cache may be read. */
  public static <T> boolean isUseCache(Query<T> query)
  {
    return isUseCache(query, DEFAULT_USE_CACHE);
  }

  public static <T> boolean isUseCache(Query<T> query, boolean defaultValue)
  {
    return parseBoolean(query, "useCache", defaultValue);
  }

  /** True when results may be written to the result-level cache. */
  public static <T> boolean isPopulateResultLevelCache(Query<T> query)
  {
    return isPopulateResultLevelCache(query, DEFAULT_POPULATE_RESULTLEVEL_CACHE);
  }

  public static <T> boolean isPopulateResultLevelCache(Query<T> query, boolean defaultValue)
  {
    return parseBoolean(query, "populateResultLevelCache", defaultValue);
  }

  /** True when the result-level cache may be read. */
  public static <T> boolean isUseResultLevelCache(Query<T> query)
  {
    return isUseResultLevelCache(query, DEFAULT_USE_RESULTLEVEL_CACHE);
  }

  public static <T> boolean isUseResultLevelCache(Query<T> query, boolean defaultValue)
  {
    return parseBoolean(query, "useResultLevelCache", defaultValue);
  }

  public static <T> boolean isFinalize(Query<T> query, boolean defaultValue)
  {
    return parseBoolean(query, "finalize", defaultValue);
  }

  public static <T> boolean isSerializeDateTimeAsLong(Query<T> query, boolean defaultValue)
  {
    return parseBoolean(query, "serializeDateTimeAsLong", defaultValue);
  }

  public static <T> boolean isSerializeDateTimeAsLongInner(Query<T> query, boolean defaultValue)
  {
    return parseBoolean(query, "serializeDateTimeAsLongInner", defaultValue);
  }

  public static <T> int getUncoveredIntervalsLimit(Query<T> query)
  {
    return getUncoveredIntervalsLimit(query, DEFAULT_UNCOVERED_INTERVALS_LIMIT);
  }

  public static <T> int getUncoveredIntervalsLimit(Query<T> query, int defaultValue)
  {
    return parseInt(query, "uncoveredIntervalsLimit", defaultValue);
  }

  public static <T> int getPriority(Query<T> query)
  {
    return getPriority(query, DEFAULT_PRIORITY);
  }

  public static <T> int getPriority(Query<T> query, int defaultValue)
  {
    return parseInt(query, PRIORITY_KEY, defaultValue);
  }

  @Deprecated
  public static <T> String getChunkPeriod(Query<T> query)
  {
    return query.getContextValue(CHUNK_PERIOD_KEY, "P0D");
  }

  /**
   * Applies the scatter-gather byte limit: installs it when the context has
   * none, accepts a context value within the limit, and rejects one above it.
   *
   * @throws IAE if the context value exceeds {@code maxScatterGatherBytesLimit}
   */
  public static <T> Query<T> withMaxScatterGatherBytes(Query<T> query, long maxScatterGatherBytesLimit)
  {
    Object raw = query.getContextValue(MAX_SCATTER_GATHER_BYTES_KEY);
    if (raw == null) {
      // No user-specified value; install the enforced limit.
      return query.withOverriddenContext(
          ImmutableMap.of(MAX_SCATTER_GATHER_BYTES_KEY, maxScatterGatherBytesLimit)
      );
    }
    long configured = ((Number) raw).longValue();
    if (configured > maxScatterGatherBytesLimit) {
      throw new IAE(
          "configured [%s = %s] is more than enforced limit of [%s].",
          MAX_SCATTER_GATHER_BYTES_KEY,
          configured,
          maxScatterGatherBytesLimit
      );
    }
    return query;
  }

  /**
   * Validates the query's timeout against the server-enforced maximum.
   *
   * @throws IAE if the effective timeout exceeds {@code maxQueryTimeout}
   */
  public static <T> Query<T> verifyMaxQueryTimeout(Query<T> query, long maxQueryTimeout)
  {
    long requested = getTimeout(query);
    if (requested > maxQueryTimeout) {
      throw new IAE(
          "configured [%s = %s] is more than enforced limit of maxQueryTimeout [%s].",
          TIMEOUT_KEY,
          requested,
          maxQueryTimeout
      );
    }
    return query;
  }

  public static <T> long getMaxQueuedBytes(Query<T> query, long defaultValue)
  {
    return parseLong(query, MAX_QUEUED_BYTES_KEY, defaultValue);
  }

  public static <T> long getMaxScatterGatherBytes(Query<T> query)
  {
    return parseLong(query, MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE);
  }

  /** True unless the effective timeout is the {@link #NO_TIMEOUT} sentinel. */
  public static <T> boolean hasTimeout(Query<T> query)
  {
    return getTimeout(query) != NO_TIMEOUT;
  }

  public static <T> long getTimeout(Query<T> query)
  {
    return getTimeout(query, getDefaultTimeout(query));
  }

  public static <T> long getTimeout(Query<T> query, long defaultTimeout)
  {
    final long timeout = parseLong(query, TIMEOUT_KEY, defaultTimeout);
    Preconditions.checkState(timeout >= 0, "Timeout must be a non negative value, but was [%s]", timeout);
    return timeout;
  }

  public static <T> Query<T> withTimeout(Query<T> query, long timeout)
  {
    return query.withOverriddenContext(ImmutableMap.of(TIMEOUT_KEY, timeout));
  }

  public static <T> Query<T> withDefaultTimeout(Query<T> query, long defaultTimeout)
  {
    return query.withOverriddenContext(ImmutableMap.of(QueryContexts.DEFAULT_TIMEOUT_KEY, defaultTimeout));
  }

  static <T> long getDefaultTimeout(Query<T> query)
  {
    final long defaultTimeout = parseLong(query, DEFAULT_TIMEOUT_KEY, DEFAULT_TIMEOUT_MILLIS);
    Preconditions.checkState(defaultTimeout >= 0, "Timeout must be a non negative value, but was [%s]", defaultTimeout);
    return defaultTimeout;
  }

  // Context readers: absent key yields the default, otherwise the value is
  // coerced via Numbers.

  static <T> long parseLong(Query<T> query, String key, long defaultValue)
  {
    final Object raw = query.getContextValue(key);
    if (raw == null) {
      return defaultValue;
    }
    return Numbers.parseLong(raw);
  }

  static <T> int parseInt(Query<T> query, String key, int defaultValue)
  {
    final Object raw = query.getContextValue(key);
    if (raw == null) {
      return defaultValue;
    }
    return Numbers.parseInt(raw);
  }

  static <T> boolean parseBoolean(Query<T> query, String key, boolean defaultValue)
  {
    final Object raw = query.getContextValue(key);
    if (raw == null) {
      return defaultValue;
    }
    return Numbers.parseBoolean(raw);
  }

  private QueryContexts()
  {
  }
}
/**
 * Copyright (c) 2008-2013, http://www.snakeyaml.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.yaml.snakeyaml.extensions.compactnotation;

import java.beans.IntrospectionException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.yaml.snakeyaml.constructor.Construct;
import org.yaml.snakeyaml.constructor.Constructor;
import org.yaml.snakeyaml.error.YAMLException;
import org.yaml.snakeyaml.introspector.Property;
import org.yaml.snakeyaml.nodes.MappingNode;
import org.yaml.snakeyaml.nodes.Node;
import org.yaml.snakeyaml.nodes.NodeTuple;
import org.yaml.snakeyaml.nodes.ScalarNode;
import org.yaml.snakeyaml.nodes.SequenceNode;

/**
 * Construct a custom Java instance out of a compact object notation format.
 */
public class CompactConstructor extends Constructor {
    // Heuristic: does a scalar look like compact notation at all,
    // e.g. "Type(arg1, name=value)"? Used only to decide whether to
    // dispatch to the compact construct.
    private static final Pattern GUESS_COMPACT = Pattern
            .compile("\\p{Alpha}.*\\s*\\((?:,?\\s*(?:(?:\\w*)|(?:\\p{Alpha}\\w*\\s*=.+))\\s*)+\\)");
    // Splits "Prefix(content)" into: group(1)=prefix/tag, group(3)=content
    // between the parentheses (group(2) is optional whitespace).
    private static final Pattern FIRST_PATTERN = Pattern.compile("(\\p{Alpha}.*)(\\s*)\\((.*?)\\)");
    // Splits one "name=value" section into group(1)=name, group(2)=value.
    private static final Pattern PROPERTY_NAME_PATTERN = Pattern
            .compile("\\s*(\\p{Alpha}\\w*)\\s*=(.+)");
    // Lazily-created singleton construct for compact nodes.
    private Construct compactConstruct;

    /**
     * Instantiates the class named by the compact data's prefix and applies
     * the parsed name=value properties to the new bean. Any failure is
     * wrapped in a YAMLException.
     */
    protected Object constructCompactFormat(ScalarNode node, CompactData data) {
        try {
            Object obj = createInstance(node, data);
            Map<String, Object> properties = new HashMap<String, Object>(data.getProperties());
            setProperties(obj, properties);
            return obj;
        } catch (Exception e) {
            throw new YAMLException(e);
        }
    }

    /**
     * Creates an instance of the class named by the data's prefix, invoking
     * the declared constructor whose arity matches the positional arguments.
     * All positional arguments are passed as Strings.
     */
    protected Object createInstance(ScalarNode node, CompactData data) throws Exception {
        Class<?> clazz = getClassForName(data.getPrefix());
        Class<?>[] args = new Class[data.getArguments().size()];
        for (int i = 0; i < args.length; i++) {
            // assume all the arguments are Strings
            args[i] = String.class;
        }
        java.lang.reflect.Constructor<?> c = clazz.getDeclaredConstructor(args);
        // Allow non-public constructors.
        c.setAccessible(true);
        return c.newInstance(data.getArguments().toArray());
    }

    /**
     * Copies each entry of {@code data} onto the corresponding bean property.
     *
     * @throws NullPointerException if {@code data} is null
     * @throws YAMLException if a property rejects its value
     */
    protected void setProperties(Object bean, Map<String, Object> data) throws Exception {
        if (data == null) {
            throw new NullPointerException("Data for Compact Object Notation cannot be null.");
        }
        for (Map.Entry<String, Object> entry : data.entrySet()) {
            String key = entry.getKey();
            Property property = getPropertyUtils().getProperty(bean.getClass(), key);
            try {
                property.set(bean, entry.getValue());
            } catch (IllegalArgumentException e) {
                // NOTE(review): original exception is not chained as the cause.
                throw new YAMLException("Cannot set property='" + key + "' with value='"
                        + data.get(key) + "' (" + data.get(key).getClass() + ") in " + bean);
            }
        }
    }

    /**
     * Parses a scalar of the form "Prefix(arg, ..., name=value, ...)" into a
     * CompactData (prefix + positional arguments + named properties).
     * Returns null when the scalar is not valid compact notation.
     */
    public CompactData getCompactData(String scalar) {
        // Cheap pre-checks before running the regex.
        if (!scalar.endsWith(")")) {
            return null;
        }
        if (scalar.indexOf('(') < 0) {
            return null;
        }
        Matcher m = FIRST_PATTERN.matcher(scalar);
        if (m.matches()) {
            String tag = m.group(1).trim();
            String content = m.group(3);
            CompactData data = new CompactData(tag);
            if (content.length() == 0)
                return data;
            // Sections are comma-separated; "name=value" goes to properties,
            // anything else is a positional argument.
            String[] names = content.split("\\s*,\\s*");
            for (int i = 0; i < names.length; i++) {
                String section = names[i];
                if (section.indexOf('=') < 0) {
                    data.getArguments().add(section);
                } else {
                    Matcher sm = PROPERTY_NAME_PATTERN.matcher(section);
                    if (sm.matches()) {
                        String name = sm.group(1);
                        String value = sm.group(2).trim();
                        data.getProperties().put(name, value);
                    } else {
                        // Malformed property section invalidates the whole scalar.
                        return null;
                    }
                }
            }
            return data;
        }
        return null;
    }

    // Lazy accessor for the shared compact construct.
    private Construct getCompactConstruct() {
        if (compactConstruct == null) {
            compactConstruct = createCompactConstruct();
        }
        return compactConstruct;
    }

    // Factory hook so subclasses can supply their own construct.
    protected Construct createCompactConstruct() {
        return new ConstructCompactObject();
    }

    /**
     * Dispatches to the compact construct when the node looks like compact
     * notation: either a plain scalar matching GUESS_COMPACT, or a one-entry
     * mapping whose single key matches it. Everything else falls through to
     * the standard constructor.
     */
    @Override
    protected Construct getConstructor(Node node) {
        if (node instanceof MappingNode) {
            MappingNode mnode = (MappingNode) node;
            List<NodeTuple> list = mnode.getValue();
            if (list.size() == 1) {
                NodeTuple tuple = list.get(0);
                Node key = tuple.getKeyNode();
                if (key instanceof ScalarNode) {
                    ScalarNode scalar = (ScalarNode) key;
                    if (GUESS_COMPACT.matcher(scalar.getValue()).matches()) {
                        return getCompactConstruct();
                    }
                }
            }
        } else if (node instanceof ScalarNode) {
            ScalarNode scalar = (ScalarNode) node;
            if (GUESS_COMPACT.matcher(scalar.getValue()).matches()) {
                return getCompactConstruct();
            }
        }
        return super.getConstructor(node);
    }

    public class ConstructCompactObject extends ConstructMapping {

        /**
         * Second construction step: the mapping's single value is either a
         * nested mapping (treated as bean properties) or a sequence (applied
         * to the bean's single List property).
         */
        @Override
        public void construct2ndStep(Node node, Object object) {
            // Compact Object Notation may contain only one entry
            MappingNode mnode = (MappingNode) node;
            NodeTuple nodeTuple = mnode.getValue().iterator().next();

            Node valueNode = nodeTuple.getValueNode();

            if (valueNode instanceof MappingNode) {
                valueNode.setType(object.getClass());
                constructJavaBean2ndStep((MappingNode) valueNode, object);
            } else {
                // value is a list
                applySequence(object, constructSequence((SequenceNode) valueNode));
            }
        }

        /*
         * MappingNode and ScalarNode end up here only they assumed to be a
         * compact object's representation (@see getConstructor(Node) above)
         */
        public Object construct(Node node) {
            ScalarNode tmpNode = null;
            if (node instanceof MappingNode) {
                // Compact Object Notation may contain only one entry
                MappingNode mnode = (MappingNode) node;
                NodeTuple nodeTuple = mnode.getValue().iterator().next();
                // Defer the value to construct2ndStep.
                node.setTwoStepsConstruction(true);
                tmpNode = (ScalarNode) nodeTuple.getKeyNode();
                // return constructScalar((ScalarNode) keyNode);
            } else {
                tmpNode = (ScalarNode) node;
            }

            CompactData data = getCompactData(tmpNode.getValue());
            if (data == null) { // TODO: Should we throw an exception here ?
                // Not compact notation after all: fall back to a plain scalar.
                return constructScalar(tmpNode);
            }
            return constructCompactFormat(tmpNode, data);
        }
    }

    /**
     * Assigns the constructed sequence to the bean's single List-typed
     * property (located via getSequencePropertyName).
     */
    protected void applySequence(Object bean, List<?> value) {
        try {
            Property property = getPropertyUtils().getProperty(bean.getClass(),
                    getSequencePropertyName(bean.getClass()));
            property.set(bean, value);
        } catch (Exception e) {
            throw new YAMLException(e);
        }
    }

    /**
     * Provide the name of the property which is used when the entries form a
     * sequence. The property must be a List.
     *
     * @throws IntrospectionException
     */
    protected String getSequencePropertyName(Class<?> bean) throws IntrospectionException {
        Set<Property> properties = getPropertyUtils().getProperties(bean);
        // Keep only List-typed properties; exactly one must remain.
        for (Iterator<Property> iterator = properties.iterator(); iterator.hasNext();) {
            Property property = iterator.next();
            if (!List.class.isAssignableFrom(property.getType())) {
                iterator.remove();
            }
        }
        if (properties.size() == 0) {
            throw new YAMLException("No list property found in " + bean);
        } else if (properties.size() > 1) {
            throw new YAMLException(
                    "Many list properties found in "
                            + bean
                            + "; Please override getSequencePropertyName() to specify which property to use.");
        }
        return properties.iterator().next().getName();
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.horn.core; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import org.apache.commons.lang.math.RandomUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.WritableUtils; import org.apache.hama.Constants; import org.apache.hama.HamaConfiguration; import org.apache.hama.bsp.BSPJob; import org.apache.hama.commons.io.FloatMatrixWritable; import org.apache.hama.commons.io.VectorWritable; import org.apache.hama.commons.math.DenseFloatMatrix; import org.apache.hama.commons.math.DenseFloatVector; import org.apache.hama.commons.math.FloatFunction; import org.apache.hama.commons.math.FloatMatrix; import org.apache.hama.commons.math.FloatVector; import org.apache.hama.util.ReflectionUtils; import org.apache.horn.core.Constants.LearningStyle; import 
org.apache.horn.core.Constants.TrainingMethod;
import org.apache.horn.examples.MultiLayerPerceptron.StandardNeuron;
import org.apache.horn.funcs.FunctionFactory;
import org.apache.horn.funcs.IdentityFunction;
import org.apache.horn.funcs.SoftMax;
import org.apache.horn.utils.MathUtils;

import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;

/**
 * LayeredNeuralNetwork defines the general operations for derivative layered
 * models, including Linear Regression, Logistic Regression, Multilayer
 * Perceptron, Autoencoder, and Restricted Boltzmann Machine, etc. For
 * LayeredNeuralNetwork, the training can be conducted in parallel, but the
 * parameters of the model are assumed to be stored in a single machine.
 *
 * In general, these models consist of neurons which are aligned in layers.
 * Between layers, for any two adjacent layers, the neurons are connected to
 * form a bipartite weighted graph.
 */
public class LayeredNeuralNetwork extends AbstractLayeredNeuralNetwork {

  private static final Log LOG = LogFactory.getLog(LayeredNeuralNetwork.class);

  /* Weights between neurons at adjacent layers */
  protected List<FloatMatrix> weightMatrixList;

  /* Previous weight updates between neurons at adjacent layers */
  protected List<FloatMatrix> prevWeightUpdatesList;

  /* Different layers can have different squashing function */
  protected List<FloatFunction> squashingFunctionList;

  /* Neuron implementation class for each non-input layer */
  protected List<Class<? extends Neuron<?>>> neuronClassList;

  /* Index of the output layer within layerSizeList */
  protected int finalLayerIdx;

  /* Instantiated neuron objects, one array per layer */
  private List<Neuron<?>[]> neurons = new ArrayList<Neuron<?>[]>();

  /* Current iteration (superstep) number, forwarded to neurons */
  private long iterations;

  /**
   * Creates an empty, untrained network; layers are added via
   * {@link #addLayer(int, boolean, FloatFunction, Class)}.
   */
  public LayeredNeuralNetwork() {
    this.layerSizeList = Lists.newArrayList();
    this.weightMatrixList = Lists.newArrayList();
    this.prevWeightUpdatesList = Lists.newArrayList();
    this.squashingFunctionList = Lists.newArrayList();
    this.neuronClassList = Lists.newArrayList();
  }

  /**
   * Loads a previously saved model (inference mode — neurons are created with
   * training disabled).
   *
   * @param conf the Hama configuration
   * @param modelPath path of the serialized model
   */
  public LayeredNeuralNetwork(HamaConfiguration conf, String modelPath) {
    super(conf, modelPath);
    initializeNeurons(false);
  }

  /**
   * Loads a previously saved model.
   *
   * @param conf the Hama configuration
   * @param modelPath path of the serialized model
   * @param isTraining whether the instantiated neurons run in training mode
   */
  public LayeredNeuralNetwork(HamaConfiguration conf, String modelPath,
      boolean isTraining) {
    super(conf, modelPath);
    initializeNeurons(isTraining);
  }

  /**
   * Instantiates the neuron objects for every layer, wiring in the layer's
   * squashing function, learning rate and momentum.
   *
   * @param isTraining whether the neurons should run in training mode
   */
  private void initializeNeurons(boolean isTraining) {
    for (int i = 0; i < layerSizeList.size(); i++) {
      int numOfNeurons = layerSizeList.get(i);

      Class<? extends Neuron<?>> neuronClass;
      if (i == 0)
        // The input layer performs no computation, so the concrete neuron
        // class is irrelevant; StandardNeuron is used as a placeholder.
        neuronClass = StandardNeuron.class;
      else
        neuronClass = neuronClassList.get(i - 1);

      Neuron<?>[] tmp = new Neuron[numOfNeurons];
      for (int j = 0; j < numOfNeurons; j++) {
        Neuron<?> n = newNeuronInstance(neuronClass);
        if (i > 0)
          n.setSquashingFunction(squashingFunctionList.get(i - 1));
        else
          // input layer passes values through unchanged
          n.setSquashingFunction(new IdentityFunction());

        n.setLayerIndex(i);
        n.setNeuronID(j);
        n.setLearningRate(this.learningRate);
        n.setMomentumWeight(this.momentumWeight);
        n.setTraining(isTraining);
        tmp[j] = n;
      }

      neurons.add(tmp);
    }
  }

  @Override
  /**
   * {@inheritDoc}
   */
  public int addLayer(int size, boolean isFinalLayer,
      FloatFunction squashingFunction, Class<? extends Neuron<?>> neuronClass) {
    return addLayer(size, isFinalLayer, squashingFunction, neuronClass, null);
  }

  /**
   * Adds a layer of the given size. Non-final layers get one extra bias
   * neuron. Weights between the new layer and the previous one are randomly
   * initialized in [-0.5, 0.5).
   *
   * @param size number of (non-bias) neurons in the layer; must be > 0
   * @param isFinalLayer whether this is the output layer
   * @param squashingFunction activation applied by the layer's neurons
   * @param neuronClass concrete neuron implementation for this layer
   * @param interlayer optional intermediate-output computer (may be null)
   * @return the index of the newly added layer
   */
  public int addLayer(int size, boolean isFinalLayer,
      FloatFunction squashingFunction, Class<? extends Neuron<?>> neuronClass,
      Class<? extends IntermediateOutput> interlayer) {

    Preconditions.checkArgument(size > 0,
        "Size of layer must be larger than 0.");
    if (!isFinalLayer) {
      if (this.layerSizeList.size() == 0) {
        LOG.info("add input layer: " + size + " neurons");
      } else {
        LOG.info("add hidden layer: " + size + " neurons");
      }
      size += 1; // reserve slot 0 for the bias neuron
    }

    this.layerSizeList.add(size);
    int layerIdx = this.layerSizeList.size() - 1;
    if (isFinalLayer) {
      this.finalLayerIdx = layerIdx;
      LOG.info("add output layer: " + size + " neurons");
    }

    // add weights between current layer and previous layer, and input layer
    // has no squashing function
    if (layerIdx > 0) {
      int sizePrevLayer = this.layerSizeList.get(layerIdx - 1);
      // row count equals to size of current size and column count equals to
      // size of previous layer
      int row = isFinalLayer ? size : size - 1;
      int col = sizePrevLayer;
      FloatMatrix weightMatrix = new DenseFloatMatrix(row, col);
      // initialize weights
      weightMatrix.applyToElements(new FloatFunction() {
        @Override
        public float apply(float value) {
          return RandomUtils.nextFloat() - 0.5f;
        }

        @Override
        public float applyDerivative(float value) {
          throw new UnsupportedOperationException("");
        }
      });
      this.weightMatrixList.add(weightMatrix);
      this.prevWeightUpdatesList.add(new DenseFloatMatrix(row, col));
      this.squashingFunctionList.add(squashingFunction);

      this.neuronClassList.add(neuronClass);
    }
    return layerIdx;
  }

  /**
   * Update the weight matrices with given matrices.
   *
   * @param matrices per-layer weight deltas, added element-wise
   */
  public void updateWeightMatrices(FloatMatrix[] matrices) {
    for (int i = 0; i < matrices.length; ++i) {
      FloatMatrix matrix = this.weightMatrixList.get(i);
      this.weightMatrixList.set(i, matrix.add(matrices[i]));
    }
  }

  /**
   * Set the previous weight matrices (used for momentum).
   *
   * @param prevUpdates per-layer weight updates from the last step
   */
  void setPrevWeightMatrices(FloatMatrix[] prevUpdates) {
    this.prevWeightUpdatesList.clear();
    Collections.addAll(this.prevWeightUpdatesList, prevUpdates);
  }

  /**
   * Add a batch of matrices onto the given destination matrices.
   *
   * @param destMatrices accumulators, updated in place
   * @param sourceMatrices matrices to add; must align with destMatrices
   */
  static void matricesAdd(FloatMatrix[] destMatrices,
      FloatMatrix[] sourceMatrices) {
    for (int i = 0; i < destMatrices.length; ++i) {
      destMatrices[i] = destMatrices[i].add(sourceMatrices[i]);
    }
  }

  /**
   * Get all the weight matrices.
   *
   * @return The matrices in form of matrix array.
   */
  FloatMatrix[] getWeightMatrices() {
    FloatMatrix[] matrices = new FloatMatrix[this.weightMatrixList.size()];
    this.weightMatrixList.toArray(matrices);
    return matrices;
  }

  /**
   * Set the weight matrices.
   *
   * @param matrices per-layer weight matrices, replacing the current ones
   */
  public void setWeightMatrices(FloatMatrix[] matrices) {
    this.weightMatrixList = new ArrayList<FloatMatrix>();
    Collections.addAll(this.weightMatrixList, matrices);
  }

  /**
   * Get the previous matrices updates in form of array.
   *
   * @return The matrices in form of matrix array.
   */
  public FloatMatrix[] getPrevMatricesUpdates() {
    FloatMatrix[] prevMatricesUpdates = new FloatMatrix[this.prevWeightUpdatesList
        .size()];
    for (int i = 0; i < this.prevWeightUpdatesList.size(); ++i) {
      prevMatricesUpdates[i] = this.prevWeightUpdatesList.get(i);
    }
    return prevMatricesUpdates;
  }

  /**
   * Replaces a single layer's weight matrix.
   *
   * @param index index of the weight matrix (0-based, between adjacent layers)
   * @param matrix the replacement matrix
   */
  public void setWeightMatrix(int index, FloatMatrix matrix) {
    Preconditions.checkArgument(
        0 <= index && index < this.weightMatrixList.size(), String.format(
            "index [%d] should be in range[%d, %d].", index, 0,
            this.weightMatrixList.size()));
    this.weightMatrixList.set(index, matrix);
  }

  @SuppressWarnings("unchecked") // Class.forName cannot carry the generic type
  @Override
  public void readFields(DataInput input) throws IOException {
    super.readFields(input);

    this.finalLayerIdx = input.readInt();
    this.dropRate = input.readFloat();

    // read neuron classes
    int neuronClasses = input.readInt();
    this.neuronClassList = Lists.newArrayList();
    for (int i = 0; i < neuronClasses; ++i) {
      try {
        Class<? extends Neuron<?>> clazz = (Class<? extends Neuron<?>>) Class
            .forName(input.readUTF());
        neuronClassList.add(clazz);
      } catch (ClassNotFoundException e) {
        // FIX: previously swallowed via printStackTrace(); a missing neuron
        // class leaves the deserialized model unusable, so fail loudly.
        throw new IOException("Neuron class not found while reading model", e);
      }
    }

    // read squash functions
    int squashingFunctionSize = input.readInt();
    this.squashingFunctionList = Lists.newArrayList();
    for (int i = 0; i < squashingFunctionSize; ++i) {
      this.squashingFunctionList.add(FunctionFactory
          .createFloatFunction(WritableUtils.readString(input)));
    }

    // read weights and construct matrices of previous updates
    int numOfMatrices = input.readInt();
    this.weightMatrixList = Lists.newArrayList();
    this.prevWeightUpdatesList = Lists.newArrayList();
    for (int i = 0; i < numOfMatrices; ++i) {
      FloatMatrix matrix = FloatMatrixWritable.read(input);
      this.weightMatrixList.add(matrix);
      // previous updates start at zero; they are intentionally not persisted
      this.prevWeightUpdatesList.add(new DenseFloatMatrix(
          matrix.getRowCount(), matrix.getColumnCount()));
    }
  }

  @Override
  public void write(DataOutput output) throws IOException {
    super.write(output);
    output.writeInt(finalLayerIdx);
    output.writeFloat(dropRate);

    // write neuron classes
    output.writeInt(this.neuronClassList.size());
    for (Class<? extends Neuron<?>> clazz : this.neuronClassList) {
      output.writeUTF(clazz.getName());
    }

    // write squashing functions
    output.writeInt(this.squashingFunctionList.size());
    for (FloatFunction aSquashingFunctionList : this.squashingFunctionList) {
      WritableUtils.writeString(output,
          aSquashingFunctionList.getFunctionName());
    }

    // write weight matrices
    output.writeInt(this.weightMatrixList.size());
    for (FloatMatrix aWeightMatrixList : this.weightMatrixList) {
      FloatMatrixWritable.write(aWeightMatrixList, output);
    }

    // DO NOT WRITE WEIGHT UPDATE
  }

  @Override
  public FloatMatrix getWeightsByLayer(int layerIdx) {
    return this.weightMatrixList.get(layerIdx);
  }

  /**
   * Get the output of the model according to given feature instance.
   *
   * @param instance a feature vector of dimension layerSizeList.get(0) - 1
   * @return the output vector of the final layer
   */
  @Override
  public FloatVector getOutput(FloatVector instance) {
    Preconditions.checkArgument(this.layerSizeList.get(0) - 1 == instance
        .getDimension(), String.format(
        "The dimension of input instance should be %d.",
        this.layerSizeList.get(0) - 1));

    // transform the features to another space
    FloatVector transformedInstance = this.featureTransformer
        .transform(instance);

    // add bias feature
    FloatVector instanceWithBias = new DenseFloatVector(
        transformedInstance.getDimension() + 1);
    instanceWithBias.set(0, 0.99999f); // set bias to be a little bit less than
                                       // 1.0
    for (int i = 1; i < instanceWithBias.getDimension(); ++i) {
      instanceWithBias.set(i, transformedInstance.get(i - 1));
    }

    // return the output of the last layer
    return getOutputInternal(instanceWithBias);
  }

  public void setDropRateOfInputLayer(float dropRate) {
    this.dropRate = dropRate;
  }

  /**
   * Calculate output internally, the intermediate output of each layer will be
   * stored.
   *
   * @param instanceWithBias The instance contains the features.
   * @return Cached output of each layer.
   */
  public FloatVector getOutputInternal(FloatVector instanceWithBias) {
    // sets the output of input layer, applying dropout per input neuron
    Neuron<?>[] inputLayer = neurons.get(0);
    for (int i = 0; i < inputLayer.length; i++) {
      // NOTE(review): assumes getBinomial(1, dropRate) returns 0 or 1, i.e. a
      // Bernoulli draw used as a dropout mask — confirm against MathUtils.
      float m2 = MathUtils.getBinomial(1, dropRate);
      if (m2 == 0)
        inputLayer[i].setDrop(true);
      else
        inputLayer[i].setDrop(false);

      inputLayer[i].setOutput(instanceWithBias.get(i) * m2);
    }

    for (int i = 0; i < this.layerSizeList.size() - 1; ++i) {
      forward(i);
    }

    FloatVector output = new DenseFloatVector(
        neurons.get(this.finalLayerIdx).length);
    for (int i = 0; i < output.getDimension(); i++) {
      output.set(i, neurons.get(this.finalLayerIdx)[i].getOutput());
    }

    return output;
  }

  /**
   * @param neuronClass
   * @return a new neuron instance
   */
  @SuppressWarnings({ "rawtypes" })
  public static Neuron newNeuronInstance(Class<? extends Neuron> neuronClass) {
    return (Neuron) ReflectionUtils.newInstance(neuronClass);
  }

  /**
   * Streams the messages a neuron receives during forward propagation: one
   * (output, weight) synapse per neuron of the previous layer.
   */
  public class InputMessageIterable implements
      Iterable<Synapse<FloatWritable, FloatWritable>> {
    private int currNeuronID;
    private int prevNeuronID;
    private int end; // index of the last neuron in the previous layer
    private FloatMatrix weightMat;
    private Neuron<?>[] layer;

    public InputMessageIterable(int fromLayer, int row) {
      this.currNeuronID = row;
      this.prevNeuronID = -1;
      this.end = weightMatrixList.get(fromLayer).getColumnCount() - 1;
      this.weightMat = weightMatrixList.get(fromLayer);
      this.layer = neurons.get(fromLayer);
    }

    @Override
    public Iterator<Synapse<FloatWritable, FloatWritable>> iterator() {
      return new MessageIterator();
    }

    private class MessageIterator implements
        Iterator<Synapse<FloatWritable, FloatWritable>> {

      // FloatWritable holders are reused across next() calls; the Synapse
      // itself is freshly allocated so consumers may retain it.
      private FloatWritable i = new FloatWritable();
      private FloatWritable w = new FloatWritable();

      @Override
      public boolean hasNext() {
        return prevNeuronID < end;
      }

      @Override
      public Synapse<FloatWritable, FloatWritable> next() {
        prevNeuronID++;
        i.set(layer[prevNeuronID].getOutput());
        w.set(weightMat.get(currNeuronID, prevNeuronID));
        // FIX: dropped the dead shared 'msg' field and its msg.set(...) call —
        // the previous code populated it and then returned a new Synapse.
        return new Synapse<FloatWritable, FloatWritable>(prevNeuronID, i, w);
      }

      @Override
      public void remove() {
      }
    }
  }

  /**
   * Forward the calculation for one layer.
   *
   * @param fromLayer The index of the previous layer.
   */
  protected void forward(int fromLayer) {
    int curLayerIdx = fromLayer + 1;
    FloatMatrix weightMatrix = this.weightMatrixList.get(fromLayer);

    FloatFunction squashingFunction = getSquashingFunction(fromLayer);
    FloatVector vec = new DenseFloatVector(weightMatrix.getRowCount());

    for (int row = 0; row < weightMatrix.getRowCount(); row++) {
      Neuron<?> n;
      if (curLayerIdx == finalLayerIdx)
        n = neurons.get(curLayerIdx)[row];
      else
        // hidden layers reserve index 0 for the bias neuron
        n = neurons.get(curLayerIdx)[row + 1];

      try {
        // raw Iterable kept deliberately — Neuron.forward's parameter type is
        // declared outside this file
        Iterable msgs = new InputMessageIterable(fromLayer, row);
        n.setIterationNumber(iterations);
        n.forward(msgs);
      } catch (IOException e) {
        // FIX: was printStackTrace(); route through the class logger
        LOG.error("Error during forward propagation at layer " + curLayerIdx, e);
      }
      vec.set(row, n.getOutput());
    }

    if (squashingFunction.getFunctionName().equalsIgnoreCase(
        SoftMax.class.getSimpleName())) {
      IntermediateOutput interlayer = (IntermediateOutput) ReflectionUtils
          .newInstance(SoftMax.SoftMaxOutputComputer.class);
      try {
        // softmax needs the whole layer's raw outputs to normalize
        vec = interlayer.interlayer(vec);

        for (int i = 0; i < vec.getDimension(); i++) {
          neurons.get(curLayerIdx)[i].setOutput(vec.get(i));
        }
      } catch (IOException e) {
        // FIX: was printStackTrace(); route through the class logger
        LOG.error("Error computing softmax intermediate output", e);
      }
    }

    // add bias
    if (curLayerIdx != finalLayerIdx)
      neurons.get(curLayerIdx)[0].setOutput(1);
  }

  /**
   * Train the model online.
   *
   * @param trainingInstance
   */
  public void trainOnline(FloatVector trainingInstance) {
    FloatMatrix[] updateMatrices = this.trainByInstance(trainingInstance);
    this.updateWeightMatrices(updateMatrices);
  }

  @Override
  public FloatMatrix[] trainByInstance(FloatVector trainingInstance) {
    // only the feature part of the instance is transformed
    FloatVector transformedVector = this.featureTransformer
        .transform(trainingInstance.sliceUnsafe(this.layerSizeList.get(0) - 1));

    int inputDimension = this.layerSizeList.get(0) - 1;
    int outputDimension;
    FloatVector inputInstance = null;
    FloatVector labels = null;

    if (this.learningStyle == LearningStyle.SUPERVISED) {
      outputDimension = this.layerSizeList.get(this.layerSizeList.size() - 1);
      // validate training instance
      Preconditions.checkArgument(
          inputDimension + outputDimension == trainingInstance.getDimension(),
          String.format(
              "The dimension of training instance is %d, but requires %d.",
              trainingInstance.getDimension(), inputDimension
                  + outputDimension));

      inputInstance = new DenseFloatVector(this.layerSizeList.get(0));
      inputInstance.set(0, 1); // add bias
      // get the features from the transformed vector
      for (int i = 0; i < inputDimension; ++i) {
        inputInstance.set(i + 1, transformedVector.get(i));
      }
      // get the labels from the original training instance
      labels = trainingInstance.sliceUnsafe(inputInstance.getDimension() - 1,
          trainingInstance.getDimension() - 1);
    } else if (this.learningStyle == LearningStyle.UNSUPERVISED) {
      // labels are identical to input features
      outputDimension = inputDimension;
      // validate training instance
      Preconditions.checkArgument(inputDimension == trainingInstance
          .getDimension(), String.format(
          "The dimension of training instance is %d, but requires %d.",
          trainingInstance.getDimension(), inputDimension));

      inputInstance = new DenseFloatVector(this.layerSizeList.get(0));
      inputInstance.set(0, 1); // add bias
      // get the features from the transformed vector
      for (int i = 0; i < inputDimension; ++i) {
        inputInstance.set(i + 1, transformedVector.get(i));
      }
      // get the labels by copying the transformed vector
      labels = transformedVector.deepCopy();
    }

    FloatVector output = this.getOutputInternal(inputInstance);
    // get the training error
    calculateTrainingError(labels, output);

    if (this.trainingMethod.equals(TrainingMethod.GRADIENT_DESCENT)) {
      FloatMatrix[] updates = this.trainByInstanceGradientDescent(labels);
      return updates;
    } else {
      // FIX: String.format with no arguments served no purpose
      throw new IllegalArgumentException("Training method is not supported.");
    }
  }

  /**
   * Train by gradient descent. Get the updated weights using one training
   * instance.
   *
   * @param labels the expected outputs for the instance just forwarded
   * @return The weight update matrices.
   */
  private FloatMatrix[] trainByInstanceGradientDescent(FloatVector labels) {

    // initialize weight update matrices
    DenseFloatMatrix[] weightUpdateMatrices = new DenseFloatMatrix[this.weightMatrixList
        .size()];

    for (int m = 0; m < weightUpdateMatrices.length; ++m) {
      weightUpdateMatrices[m] = new DenseFloatMatrix(this.weightMatrixList.get(
          m).getRowCount(), this.weightMatrixList.get(m).getColumnCount());
    }
    FloatVector deltaVec = new DenseFloatVector(
        this.layerSizeList.get(this.layerSizeList.size() - 1));

    FloatFunction squashingFunction = this.squashingFunctionList
        .get(this.squashingFunctionList.size() - 1);

    FloatMatrix lastWeightMatrix = this.weightMatrixList
        .get(this.weightMatrixList.size() - 1);

    for (int i = 0; i < deltaVec.getDimension(); ++i) {
      float finalOut = neurons.get(finalLayerIdx)[i].getOutput();
      float costFuncDerivative = this.costFunction.applyDerivative(
          labels.get(i), finalOut);
      // add regularization
      costFuncDerivative += this.regularizationWeight
          * lastWeightMatrix.getRowVector(i).sum();

      // softmax couples its derivative with the cost function, so the
      // squashing derivative is only applied for other activations
      if (!squashingFunction.getFunctionName().equalsIgnoreCase(
          SoftMax.class.getSimpleName())) {
        costFuncDerivative *= squashingFunction.applyDerivative(finalOut);
      }

      neurons.get(finalLayerIdx)[i].backpropagate(costFuncDerivative);
      deltaVec.set(i, costFuncDerivative);
    }

    // start from previous layer of output layer
    for (int layer = this.layerSizeList.size() - 2; layer >= 0; --layer) {
      backpropagate(layer, weightUpdateMatrices[layer]);
    }

    this.setPrevWeightMatrices(weightUpdateMatrices);

    return weightUpdateMatrices;
  }

  /**
   * Streams the messages a neuron receives during backpropagation: one
   * (delta, weight, previous-update) synapse per neuron of the next layer.
   */
  public class ErrorMessageIterable implements
      Iterable<Synapse<FloatWritable, FloatWritable>> {
    private int row;
    private int neuronID;
    private int end; // index of the last neuron in the next layer
    private FloatMatrix weightMat;
    private FloatMatrix prevWeightMat;

    private float[] nextLayerDelta;

    public ErrorMessageIterable(int curLayerIdx, int row) {
      this.row = row;
      this.neuronID = -1;
      this.weightMat = weightMatrixList.get(curLayerIdx);
      this.end = weightMat.getRowCount() - 1;
      this.prevWeightMat = prevWeightUpdatesList.get(curLayerIdx);

      // snapshot the next layer's deltas (skipping its bias neuron unless it
      // is the output layer)
      Neuron<?>[] nextLayer = neurons.get(curLayerIdx + 1);
      nextLayerDelta = new float[weightMat.getRowCount()];

      for (int i = 0; i <= end; ++i) {
        if (curLayerIdx + 1 == finalLayerIdx) {
          nextLayerDelta[i] = nextLayer[i].getDelta();
        } else {
          nextLayerDelta[i] = nextLayer[i + 1].getDelta();
        }
      }
    }

    @Override
    public Iterator<Synapse<FloatWritable, FloatWritable>> iterator() {
      return new MessageIterator();
    }

    private class MessageIterator implements
        Iterator<Synapse<FloatWritable, FloatWritable>> {

      // the Synapse and its holders are reused across next() calls; consumers
      // must not retain references between iterations
      private FloatWritable d = new FloatWritable();
      private FloatWritable w = new FloatWritable();
      private FloatWritable p = new FloatWritable();
      private Synapse<FloatWritable, FloatWritable> msg = new Synapse<FloatWritable, FloatWritable>();

      @Override
      public boolean hasNext() {
        return neuronID < end;
      }

      @Override
      public Synapse<FloatWritable, FloatWritable> next() {
        neuronID++;
        d.set(nextLayerDelta[neuronID]);
        w.set(weightMat.get(neuronID, row));
        p.set(prevWeightMat.get(neuronID, row));
        msg.set(neuronID, d, w, p);
        return msg;
      }

      @Override
      public void remove() {
      }
    }
  }

  /**
   * Back-propagate the errors from the next layer to the current layer. The
   * weight update information is stored in weightUpdateMatrix.
   *
   * NOTE(review): the computed deltaVector is currently discarded (the method
   * returns void despite the original doc claiming a returned delta) — kept
   * as-is to preserve behavior; confirm whether callers ever needed it.
   *
   * @param curLayerIdx Index of current layer.
   * @param weightUpdateMatrix receives the per-column weight updates
   */
  private void backpropagate(int curLayerIdx,
      // FloatVector nextLayerDelta, FloatVector curLayerOutput,
      DenseFloatMatrix weightUpdateMatrix) {

    // get layer related information
    int x = this.weightMatrixList.get(curLayerIdx).getColumnCount();
    int y = this.weightMatrixList.get(curLayerIdx).getRowCount();

    FloatVector deltaVector = new DenseFloatVector(x);
    Neuron<?>[] ns = neurons.get(curLayerIdx);

    for (int row = 0; row < x; ++row) {
      Neuron<?> n = ns[row];
      n.setWeightVector(y);

      try {
        // raw Iterable kept deliberately — see forward()
        Iterable msgs = new ErrorMessageIterable(curLayerIdx, row);
        n.backward(msgs);
      } catch (IOException e) {
        // FIX: was printStackTrace(); route through the class logger
        LOG.error("Error during backpropagation at layer " + curLayerIdx, e);
      }

      // update weights
      weightUpdateMatrix.setColumn(row, n.getWeights());
      deltaVector.set(row, n.getDelta());
    }
  }

  @Override
  protected BSPJob trainInternal(HamaConfiguration conf) throws IOException,
      InterruptedException, ClassNotFoundException {
    this.conf = conf;
    this.fs = FileSystem.get(conf);

    String modelPath = conf.get("model.path");
    if (modelPath != null) {
      this.modelPath = modelPath;
    }
    // modelPath must be set before training
    if (this.modelPath == null) {
      throw new IllegalArgumentException(
          "Please specify the modelPath for model, "
              + "either through setModelPath() or add 'modelPath' to the training parameters.");
    }
    this.writeModelToFile();

    // create job
    BSPJob job = new BSPJob(conf, LayeredNeuralNetworkTrainer.class);
    job.setJobName("Neural Network training");
    job.setJarByClass(LayeredNeuralNetworkTrainer.class);
    job.setBspClass(LayeredNeuralNetworkTrainer.class);

    // additional master task for aggregation
    job.getConfiguration().setInt(Constants.ADDITIONAL_BSP_TASKS, 1);

    job.setBoolean("training.mode", true);
    job.setInputPath(new Path(conf.get("training.input.path")));
    job.setInputFormat(org.apache.hama.bsp.SequenceFileInputFormat.class);
    job.setInputKeyClass(LongWritable.class);
    job.setInputValueClass(VectorWritable.class);
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(NullWritable.class);
    job.setOutputFormat(org.apache.hama.bsp.NullOutputFormat.class);

    return job;
  }

  @Override
  protected void calculateTrainingError(FloatVector labels, FloatVector output) {
    FloatVector errors = labels.deepCopy().applyToElements(output,
        this.costFunction);
    this.trainingError = errors.sum();
  }

  /**
   * Get the squashing function of a specified layer.
   *
   * @param idx
   * @return a new vector with the result of the operation.
   */
  public FloatFunction getSquashingFunction(int idx) {
    return this.squashingFunctionList.get(idx);
  }

  public void setIterationNumber(long iterations) {
    this.iterations = iterations;
  }
}
/* * The MIT License * Copyright (c) 2012 Microsoft Corporation * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package microsoft.exchange.webservices.data; import javax.xml.stream.XMLStreamException; import java.util.EnumSet; /** * Represents an object that can be used to store user-defined configuration * settings. */ public class UserConfiguration { /** * The object version. */ private static ExchangeVersion ObjectVersion = ExchangeVersion.Exchange2010; /** * For consistency with ServiceObject behavior, access to ItemId is * permitted for a new object. */ /** * The Constant PropertiesAvailableForNewObject. */ private final static EnumSet<UserConfigurationProperties> PropertiesAvailableForNewObject = EnumSet.of(UserConfigurationProperties.BinaryData, UserConfigurationProperties.Dictionary, UserConfigurationProperties.XmlData); /** * The No properties. */ private final UserConfigurationProperties NoProperties = UserConfigurationProperties.values()[0]; /** * The service. 
*/ private ExchangeService service; /** * The name. */ private String name; /** * The parent folder id. */ private FolderId parentFolderId = null; /** * The item id. */ private ItemId itemId = null; /** * The dictionary. */ private UserConfigurationDictionary dictionary = null; /** * The xml data. */ private byte[] xmlData = null; /** * The binary data. */ private byte[] binaryData = null; /** * The properties available for access. */ private EnumSet<UserConfigurationProperties> propertiesAvailableForAccess; /** * The updated properties. */ private EnumSet<UserConfigurationProperties> updatedProperties; /** * Indicates whether changes trigger an update or create operation. */ private boolean isNew = false; /** * Initializes a new instance of <see cref="UserConfiguration"/> class. * * @param service The service to which the user configuration is bound. * @throws Exception the exception */ public UserConfiguration(ExchangeService service) throws Exception { this(service, PropertiesAvailableForNewObject); } /** * Writes a byte array to Xml. * * @param writer The writer. * @param byteArray Byte array to write. * @param xmlElementName Name of the Xml element. * @throws javax.xml.stream.XMLStreamException the xML stream exception * @throws ServiceXmlSerializationException the service xml serialization exception */ private static void writeByteArrayToXml(EwsServiceXmlWriter writer, byte[] byteArray, String xmlElementName) throws XMLStreamException, ServiceXmlSerializationException { EwsUtilities.EwsAssert(writer != null, "UserConfiguration.WriteByteArrayToXml", "writer is null"); EwsUtilities.EwsAssert(xmlElementName != null, "UserConfiguration.WriteByteArrayToXml", "xmlElementName is null"); writer.writeStartElement(XmlNamespace.Types, xmlElementName); if (byteArray != null && byteArray.length > 0) { writer.writeValue(Base64EncoderStream.encode(byteArray), xmlElementName); } writer.writeEndElement(); } /** * Writes to Xml. * * @param writer The writer. 
* @param xmlNamespace The XML namespace. * @param name The user configuration name. * @param parentFolderId The Id of the folder containing the user configuration. * @throws Exception the exception */ protected static void writeUserConfigurationNameToXml( EwsServiceXmlWriter writer, XmlNamespace xmlNamespace, String name, FolderId parentFolderId) throws Exception { EwsUtilities.EwsAssert(writer != null, "UserConfiguration.WriteUserConfigurationNameToXml", "writer is null"); EwsUtilities.EwsAssert(name != null, "UserConfiguration.WriteUserConfigurationNameToXml", "name is null"); EwsUtilities.EwsAssert(parentFolderId != null, "UserConfiguration.WriteUserConfigurationNameToXml", "parentFolderId is null"); writer.writeStartElement(xmlNamespace, XmlElementNames.UserConfigurationName); writer.writeAttributeValue(XmlAttributeNames.Name, name); parentFolderId.writeToXml(writer); writer.writeEndElement(); } /** * Initializes a new instance of <see cref="UserConfiguration"/> class. * * @param service The service to which the user configuration is bound. * @param requestedProperties The properties requested for this user configuration. * @throws Exception the exception */ protected UserConfiguration(ExchangeService service, EnumSet<UserConfigurationProperties> requestedProperties) throws Exception { EwsUtilities.validateParam(service, "service"); if (service.getRequestedServerVersion().ordinal() < UserConfiguration.ObjectVersion.ordinal()) { throw new ServiceVersionException(String.format( Strings.ObjectTypeIncompatibleWithRequestVersion, this .getClass().getName(), UserConfiguration.ObjectVersion)); } this.service = service; this.isNew = true; this.initializeProperties(requestedProperties); } /** * Gets the name of the user configuration. * * @return the name */ public String getName() { return this.name; } /** * Sets the name. 
* * @param value the new name */ protected void setName(String value) { this.name = value; } /** * Gets the Id of the folder containing the user configuration. * * @return the parent folder id */ public FolderId getParentFolderId() { return this.parentFolderId; } /** * Sets the parent folder id. * * @param value the new parent folder id */ protected void setParentFolderId(FolderId value) { this.parentFolderId = value; } /** * Gets the Id of the user configuration. * * @return the item id */ public ItemId getItemId() { return this.itemId; } /** * Gets the dictionary of the user configuration. * * @return the dictionary */ public UserConfigurationDictionary getDictionary() { return this.dictionary; } /** * Gets the xml data of the user configuration. * * @return the xml data * @throws microsoft.exchange.webservices.data.PropertyException the property exception */ public byte[] getXmlData() throws PropertyException { this.validatePropertyAccess(UserConfigurationProperties.XmlData); return this.xmlData; } /** * Sets the xml data. * * @param value the new xml data */ public void setXmlData(byte[] value) { this.xmlData = value; this.markPropertyForUpdate(UserConfigurationProperties.XmlData); } /** * Gets the binary data of the user configuration. * * @return the binary data * @throws microsoft.exchange.webservices.data.PropertyException the property exception */ public byte[] getBinaryData() throws PropertyException { this.validatePropertyAccess(UserConfigurationProperties.BinaryData); return this.binaryData; } /** * Sets the binary data. * * @param value the new binary data */ public void setBinaryData(byte[] value) { this.binaryData = value; this.markPropertyForUpdate(UserConfigurationProperties.BinaryData); } /** * Gets a value indicating whether this user configuration has been * modified. 
 *
 * @return true if any property of this user configuration, or its
 *         dictionary, has been modified locally and not yet sent to EWS
 */
public boolean getIsDirty() {
    // Dirty when the updated-properties set contains anything besides the
    // NoProperties sentinel, or when the dictionary has pending edits of
    // its own.
    return (!this.updatedProperties.contains(NoProperties)) ||
            this.dictionary.getIsDirty();
}

/**
 * Binds to an existing user configuration and loads the specified
 * properties. Calling this method results in a call to EWS.
 *
 * @param service        The service to which the user configuration is bound.
 * @param name           The name of the user configuration.
 * @param parentFolderId The Id of the folder containing the user configuration.
 * @param properties     The properties to load.
 * @return A user configuration instance.
 * @throws IndexOutOfBoundsException the index out of bounds exception
 * @throws Exception                 the exception
 */
public static UserConfiguration bind(ExchangeService service, String name,
        FolderId parentFolderId, UserConfigurationProperties properties)
        throws IndexOutOfBoundsException, Exception {
    UserConfiguration result = service.getUserConfiguration(name,
            parentFolderId, properties);
    // The configuration came from the server, so it is not "new":
    // update()/delete() become legal and save() becomes illegal.
    result.isNew = false;

    return result;
}

/**
 * Binds to an existing user configuration and loads the specified
 * properties.
 *
 * @param service          The service to which the user configuration is bound.
 * @param name             The name of the user configuration.
 * @param parentFolderName The name of the folder containing the user configuration.
 * @param properties       The properties to load.
 * @return A user configuration instance.
 * @throws IndexOutOfBoundsException the index out of bounds exception
 * @throws Exception                 the exception
 */
public static UserConfiguration bind(ExchangeService service, String name,
        WellKnownFolderName parentFolderName,
        UserConfigurationProperties properties)
        throws IndexOutOfBoundsException, Exception {
    // Convenience overload: resolve the well-known folder name to a
    // FolderId and delegate to the FolderId-based bind above.
    return UserConfiguration.bind(service, name, new FolderId(
            parentFolderName), properties);
}

/**
 * Saves the user configuration. Calling this method results in a call to
 * EWS.
 *
 * @param name           The name of the user configuration.
 * @param parentFolderId The Id of the folder in which to save the user configuration.
 * @throws Exception the exception
 */
public void save(String name, FolderId parentFolderId) throws Exception {
    EwsUtilities.validateParam(name, "name");
    EwsUtilities.validateParam(parentFolderId, "parentFolderId");

    parentFolderId.validate(this.service.getRequestedServerVersion());

    // Only a locally created (never bound/saved) configuration may be
    // saved; an existing one must go through update() instead.
    if (!this.isNew) {
        throw new InvalidOperationException(
                Strings.CannotSaveNotNewUserConfiguration);
    }

    this.parentFolderId = parentFolderId;
    this.name = name;

    this.service.createUserConfiguration(this);

    // After a successful create this object represents a persisted
    // configuration with no outstanding local changes.
    this.isNew = false;

    this.resetIsDirty();
}

/**
 * Saves the user configuration. Calling this method results in a call to
 * EWS.
 *
 * @param name             The name of the user configuration.
 * @param parentFolderName The name of the folder in which to save the user
 *                         configuration.
 * @throws Exception the exception
 */
public void save(String name, WellKnownFolderName parentFolderName)
        throws Exception {
    this.save(name, new FolderId(parentFolderName));
}

/**
 * Updates the user configuration by applying local changes to the Exchange
 * server. Calling this method results in a call to EWS
 *
 * @throws Exception the exception
 */
public void update() throws Exception {
    // A configuration that has never been saved cannot be updated.
    if (this.isNew) {
        throw new InvalidOperationException(
                Strings.CannotUpdateNewUserConfiguration);
    }

    // Only issue the EWS call when at least one persistable property
    // actually needs to be sent.
    if (this.isPropertyUpdated(UserConfigurationProperties.BinaryData) ||
            this.isPropertyUpdated(UserConfigurationProperties.
                    Dictionary) ||
            this.isPropertyUpdated(UserConfigurationProperties.
                    XmlData)) {
        this.service.updateUserConfiguration(this);
    }

    this.resetIsDirty();
}

/**
 * Deletes the user configuration. Calling this method results in a call to
 * EWS.
 *
 * @throws Exception the exception
 */
public void delete() throws Exception {
    if (this.isNew) {
        // Nothing exists on the server yet, so there is nothing to delete.
        throw new InvalidOperationException(
                Strings.DeleteInvalidForUnsavedUserConfiguration);
    } else {
        this.service
                .deleteUserConfiguration(this.name, this.parentFolderId);
    }
}

/**
 * Loads the specified properties on the user configuration. Calling this
 * method results in a call to EWS.
 *
 * @param properties The properties to load.
 * @throws Exception the exception
 */
public void load(UserConfigurationProperties properties) throws Exception {
    // Reset local state first: loading discards any unsaved changes.
    this.initializeProperties(EnumSet.of(properties));

    this.service.loadPropertiesForUserConfiguration(this, properties);
}

/**
 * Writes this user configuration to XML.
 *
 * @param writer         The writer.
 * @param xmlNamespace   The XML namespace.
 * @param xmlElementName Name of the XML element.
 * @throws Exception the exception
 */
protected void writeToXml(EwsServiceXmlWriter writer,
        XmlNamespace xmlNamespace, String xmlElementName) throws Exception {
    EwsUtilities.EwsAssert(writer != null, "UserConfiguration.WriteToXml",
            "writer is null");
    EwsUtilities.EwsAssert(xmlElementName != null,
            "UserConfiguration.WriteToXml", "xmlElementName is null");

    writer.writeStartElement(xmlNamespace, xmlElementName);

    // Write the UserConfigurationName element
    writeUserConfigurationNameToXml(writer, XmlNamespace.Types, this.name,
            this.parentFolderId);

    // Write the Dictionary element, only if it was actually modified.
    if (this.isPropertyUpdated(UserConfigurationProperties.Dictionary)) {
        this.dictionary.writeToXml(writer, XmlElementNames.Dictionary);
    }

    // Write the XmlData element, only if it was actually modified.
    if (this.isPropertyUpdated(UserConfigurationProperties.XmlData)) {
        this.writeXmlDataToXml(writer);
    }

    // Write the BinaryData element, only if it was actually modified.
    if (this.isPropertyUpdated(UserConfigurationProperties.BinaryData)) {
        this.writeBinaryDataToXml(writer);
    }

    writer.writeEndElement();
}

/**
 * Determines whether the specified property was updated and therefore
 * needs to be serialized in the next request.
 *
 * @param property property to evaluate.
 * @return Boolean indicating whether to send the property Xml.
 */
private boolean isPropertyUpdated(UserConfigurationProperties property) {
    boolean isPropertyDirty = false;
    boolean isPropertyEmpty = false;

    switch (property) {
        case Dictionary:
            // The dictionary tracks its own dirty flag.
            isPropertyDirty = this.getDictionary().getIsDirty();
            isPropertyEmpty = this.getDictionary().getCount() == 0;
            break;
        case XmlData:
            isPropertyDirty = this.updatedProperties.contains(property);
            isPropertyEmpty = (this.xmlData == null) ||
                    (this.xmlData.length == 0);
            break;
        case BinaryData:
            isPropertyDirty = this.updatedProperties.contains(property);
            isPropertyEmpty = (this.binaryData == null) ||
                    (this.binaryData.length == 0);
            break;
        default:
            EwsUtilities.EwsAssert(false,
                    "UserConfiguration.IsPropertyUpdated",
                    "property not supported: " + property.toString());
            break;
    }

    // Consider the property updated, if it's been modified, and either
    // . there's a value or
    // . there's no value but the operation is update.
    return isPropertyDirty && ((!isPropertyEmpty) || (!this.isNew));
}

/**
 * Writes the XmlData property to Xml.
 *
 * @param writer The writer.
 * @throws javax.xml.stream.XMLStreamException the xML stream exception
 * @throws ServiceXmlSerializationException    the service xml serialization exception
 */
private void writeXmlDataToXml(EwsServiceXmlWriter writer)
        throws XMLStreamException, ServiceXmlSerializationException {
    EwsUtilities.EwsAssert(writer != null,
            "UserConfiguration.WriteXmlDataToXml", "writer is null");

    writeByteArrayToXml(writer, this.xmlData, XmlElementNames.XmlData);
}

/**
 * Writes the BinaryData property to Xml.
 *
 * @param writer The writer.
 * @throws javax.xml.stream.XMLStreamException the xML stream exception
 * @throws ServiceXmlSerializationException    the service xml serialization exception
 */
private void writeBinaryDataToXml(EwsServiceXmlWriter writer)
        throws XMLStreamException, ServiceXmlSerializationException {
    EwsUtilities.EwsAssert(writer != null,
            "UserConfiguration.WriteBinaryDataToXml", "writer is null");

    writeByteArrayToXml(writer, this.binaryData,
            XmlElementNames.BinaryData);
}

/**
 * Loads this user configuration from XML.
 *
 * @param reader The reader.
 * @throws Exception the exception
 */
protected void loadFromXml(EwsServiceXmlReader reader) throws Exception {
    EwsUtilities.EwsAssert(reader != null, "UserConfiguration.LoadFromXml",
            "reader is null");

    reader.readStartElement(XmlNamespace.Messages,
            XmlElementNames.UserConfiguration);

    reader.read(); // Position at first property element

    // Walk the child elements until the closing UserConfiguration tag.
    do {
        if (reader.getNodeType().getNodeType() == XmlNodeType.START_ELEMENT) {
            if (reader.getLocalName().equals(
                    XmlElementNames.UserConfigurationName)) {
                // Sanity-check that the server echoed back the name we
                // asked for, then skip the element's contents.
                String responseName = reader
                        .readAttributeValue(XmlAttributeNames.Name);

                EwsUtilities.EwsAssert(this.name.equals(responseName),
                        "UserConfiguration.LoadFromXml",
                        "UserConfigurationName does not match: Expected: " +
                                this.name + " Name in response: " +
                                responseName);

                reader.skipCurrentElement();
            } else if (reader.getLocalName().equals(XmlElementNames.ItemId)) {
                this.itemId = new ItemId();
                this.itemId.loadFromXml(reader, XmlElementNames.ItemId);
            } else if (reader.getLocalName().equals(
                    XmlElementNames.Dictionary)) {
                this.dictionary.loadFromXml(reader,
                        XmlElementNames.Dictionary);
            } else if (reader.getLocalName()
                    .equals(XmlElementNames.XmlData)) {
                // Element values are base64-encoded on the wire.
                this.xmlData = Base64EncoderStream.decode(reader
                        .readElementValue());
            } else if (reader.getLocalName().equals(
                    XmlElementNames.BinaryData)) {
                this.binaryData = Base64EncoderStream.decode(reader
                        .readElementValue());
            } else {
                EwsUtilities.EwsAssert(false,
                        "UserConfiguration.LoadFromXml",
                        "Xml element not supported: " +
                                reader.getLocalName());
            }
        }

        // If XmlData was loaded, read is skipped because GetXmlData
        // positions the reader at the next property.
        reader.read();
    } while (!reader.isEndElement(XmlNamespace.Messages,
            XmlElementNames.UserConfiguration));
}

/**
 * Initializes properties.
 *
 * @param requestedProperties The properties requested for this UserConfiguration.
 */
// InitializeProperties is called in 3 cases:
// . Create new object: From the UserConfiguration constructor.
// . Bind to existing object: Again from the constructor. The constructor
//   is called eventually by the GetUserConfiguration request.
// . Refresh properties: From the Load method.
private void initializeProperties(
        EnumSet<UserConfigurationProperties> requestedProperties) {
    this.itemId = null;
    this.dictionary = new UserConfigurationDictionary();
    this.xmlData = null;
    this.binaryData = null;
    this.propertiesAvailableForAccess = requestedProperties;

    this.resetIsDirty();
}

/**
 * Resets flags to indicate that properties haven't been modified.
 */
private void resetIsDirty() {
    // NOTE(review): EnumSet.of does not declare a checked exception; this
    // catch block looks purely defensive and could likely be removed —
    // confirm before changing.
    try {
        this.updatedProperties = EnumSet.of(NoProperties);
    } catch (Exception e) {
        e.printStackTrace();
    }
    this.dictionary.setIsDirty(false);
}

/**
 * Determines whether the specified property may be accessed.
 *
 * @param property Property to access.
 * @throws microsoft.exchange.webservices.data.PropertyException the property exception
 */
private void validatePropertyAccess(UserConfigurationProperties property)
        throws PropertyException {
    // Only properties that were requested (or assigned) may be read.
    if (!this.propertiesAvailableForAccess.contains(property)) {
        throw new PropertyException(
                Strings.MustLoadOrAssignPropertyBeforeAccess, property
                        .toString());
    }
}

/**
 * Adds the passed property to updatedProperties.
 *
 * @param property Property to update.
 */
private void markPropertyForUpdate(UserConfigurationProperties property) {
    this.updatedProperties.add(property);
    // A property that was just assigned is also accessible from now on.
    this.propertiesAvailableForAccess.add(property);
}
}
package com.mbresson.betaform;

import com.badlogic.gdx.assets.AssetManager;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.*;
import com.badlogic.gdx.utils.Array;

/**
 * This class is responsible for creating, destroying and displaying bullets.
 * It handles various types of bullets: user-created bullets, enemy bullets.
 */
public class BulletManager implements ResourceEater {

    // Used as the key under which this class registers its assets with the
    // ResourceLoader (see getResourceEaterID()).
    private static final String classID = BulletManager.class.getName();

    // Horizontal speed of every bullet, in world (physics) units.
    private static final float BULLET_SPEED = 10f;

    /**
     * Kind of bullet; the label is the region name in the texture atlas.
     */
    public enum BulletType {
        PLAYER_BULLET("player-bullet"),
        PLAYER_SUPER_BULLET("player-super-bullet"),
        ENEMY_BULLET("enemy-bullet");

        private String type;

        private BulletType(String type) {
            this.type = type;
        }

        /** @return the atlas region name for this bullet type */
        public String getLabel() {
            return this.type;
        }
    }

    /**
     * A single live bullet: its type, its Box2D body, its travel direction
     * and the damage it deals on impact.
     */
    public class Bullet implements Traversable {
        public BulletType type;
        public Body body;
        public Direction direction;
        public int strength = 10;

        /**
         * Removes this bullet's body from the physics world.
         * Must not be called while the world is stepping (see update()).
         */
        public void dispose() {
            GameInstance gameRef = Betaform.getGameRef();
            if(gameRef == null) {
                return;
            }

            gameRef.getWorld().destroyBody(this.body);
        }

        /**
         * Flips the bullet's travel direction and its velocity accordingly
         * (e.g. after bouncing off something).
         */
        public void invertDirection() {
            this.direction = (this.direction == Direction.LEFT ? Direction.RIGHT : Direction.LEFT);

            this.body.setLinearVelocity(
                direction == Direction.LEFT ?
                -BULLET_SPEED : BULLET_SPEED, 0
            );
        }
    }

    // Path of the texture atlas containing all bullet sprites.
    private String getPath() {
        return "img/sprites/bullets.atlas";
    }

    private TextureAtlas textureAtlas;
    // Shared scratch sprite: its region is swapped per bullet type both when
    // spawning (for sizing) and when drawing.
    private Sprite sprite;

    // Live bullets, and bullets queued for removal after the physics step.
    private Array<Bullet> bullets = new Array<>(10);
    private Array<Bullet> bulletsToBeDestroyed = new Array<>(10);

    public BulletManager() {
    }

    /**
     * Spawns a new bullet just in front of the shooter.
     *
     * @param physicsPosition position of the shooter, in world units
     * @param direction       which way the bullet travels
     * @param strength        damage dealt on impact
     */
    // NOTE(review): requires postloadResources() to have been called first,
    // otherwise `sprite`/`textureAtlas` are still null.
    public void newBullet(BulletType type, Vector2 physicsPosition, Direction direction, int strength) {
        // Switch the scratch sprite to this bullet's region so that its
        // width/height below match the right image.
        sprite.setRegion(textureAtlas.findRegion(type.getLabel()));

        Bullet bullet = new Bullet();
        bullet.type = type;
        bullet.strength = strength;
        bullet.direction = direction;

        BodyDef bodyDef = new BodyDef();
        bodyDef.type = BodyDef.BodyType.DynamicBody;

        // Offset the spawn point by half the sprite width so the bullet
        // appears in front of the shooter, not inside it.
        float finalX = physicsPosition.x;
        if(direction == Direction.LEFT) {
            finalX -= sprite.getWidth()/2 / Configuration.Physics.WORLD_UNIT_TO_PIXELS;
        } else {
            finalX += sprite.getWidth()/2 / Configuration.Physics.WORLD_UNIT_TO_PIXELS;
        }

        float finalY = physicsPosition.y + sprite.getHeight() / Configuration.Physics.WORLD_UNIT_TO_PIXELS;

        bodyDef.position.set(
            finalX,
            finalY
        );

        GameInstance gameRef = Betaform.getGameRef();
        if(gameRef == null) {
            return;
        }

        bullet.body = gameRef.getWorld().createBody(bodyDef);
        // Link the body back to the bullet so collision callbacks can find it.
        bullet.body.setUserData(bullet);
        // Mark as a Box2D "bullet" for continuous collision detection, and
        // disable gravity so it flies in a straight line.
        bullet.body.setBullet(true);
        bullet.body.setGravityScale(0);

        PolygonShape shape = new PolygonShape();
        shape.setAsBox(
            sprite.getWidth()/2 / Configuration.Physics.WORLD_UNIT_TO_PIXELS,
            sprite.getHeight()/2 / Configuration.Physics.WORLD_UNIT_TO_PIXELS
        );

        // Sensor fixture: detects overlaps without producing collision forces.
        FixtureDef fixtureDef = new FixtureDef();
        fixtureDef.shape = shape;
        fixtureDef.density = 0f;
        fixtureDef.friction = 0f;
        fixtureDef.isSensor = true;

        Fixture fixture = bullet.body.createFixture(fixtureDef);
        shape.dispose();

        bullet.body.setLinearVelocity(
            direction == Direction.LEFT ?
            -BULLET_SPEED : BULLET_SPEED, 0
        );

        bullets.add(bullet);
    }

    /**
     * Display all existing bullets.
     */
    public void display(SpriteBatch batch) {
        for(Bullet bullet: this.bullets) {
            sprite.setRegion(textureAtlas.findRegion(bullet.type.getLabel()));

            // Convert the body's world-unit position to pixels, centering the
            // sprite on the body.
            float x = bullet.body.getPosition().x * Configuration.Physics.WORLD_UNIT_TO_PIXELS - sprite.getWidth() / 2;
            float y = bullet.body.getPosition().y * Configuration.Physics.WORLD_UNIT_TO_PIXELS - sprite.getHeight() / 2;

            if(bullet.direction == Direction.LEFT) {
                // Flip, draw, then flip back since the sprite is shared.
                sprite.flip(true, false);
                batch.draw(sprite, x, y);
                sprite.flip(true, false);
            } else {
                batch.draw(sprite, x, y);
            }
        }
    }

    // deltaTime is a placeholder
    /**
     * Destroys all bullets queued by destroyBullet(); called once per frame,
     * outside the physics step, because Box2D forbids destroying bodies while
     * the world is stepping.
     */
    public void update(float deltaTime) {
        for(Bullet bullet: bulletsToBeDestroyed) {
            this.bullets.removeValue(bullet, true);
            bullet.dispose();
        }

        bulletsToBeDestroyed.clear();
    }

    /**
     * Queues a bullet for destruction on the next update() call.
     */
    public void destroyBullet(Bullet bullet) {
        /*
         * We queue the bullets to be destroyed:
         * we can't destroy them while the physics engine is in the middle of its computations.
         */

        /*
         * Also, make sure a bullet is not set to be destroyed twice:
         * it can happen if several fixtures of the player are in contact with the bullet at the same time
         * and call the destroyBullet function at the same time.
         */
        if(this.bulletsToBeDestroyed.contains(bullet, true)) {
            return;
        }

        bulletsToBeDestroyed.add(bullet);
    }

    @Override
    public void preloadResources() throws ResourceLoader.AlreadyPreloadedException {
        ResourceLoader loader = ResourceLoader.getInstance();

        // Only queue the atlas for loading once.
        if(!loader.isPreloaded(this.getResourceEaterID())) {
            AssetManager manager = ResourceLoader.getInstance().registerForPreloading(this.getResourceEaterID());
            manager.load(this.getPath(), TextureAtlas.class);
        }
    }

    @Override
    public void postloadResources() throws ResourceLoader.NotPreloadedYetException {
        AssetManager manager = ResourceLoader.getInstance().registerForPostloading(this.getResourceEaterID());

        // Grab the loaded atlas and initialise the shared scratch sprite.
        this.textureAtlas = manager.get(this.getPath(), TextureAtlas.class);
        this.sprite = new Sprite(this.textureAtlas.findRegion(BulletType.PLAYER_BULLET.getLabel()));
    }

    @Override
    public String getResourceEaterID() {
        return classID;
    }
}
/* * Copyright (c) 2015-2016 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.web.page.admin.roles; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.xml.namespace.QName; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.form.OnChangeAjaxBehavior; import org.apache.wicket.markup.html.form.DropDownChoice; import org.apache.wicket.markup.html.form.Form; import org.apache.wicket.markup.html.form.IChoiceRenderer; import org.apache.wicket.model.AbstractReadOnlyModel; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import com.evolveum.midpoint.gui.api.component.MainObjectListPanel; import com.evolveum.midpoint.gui.api.page.PageBase; import com.evolveum.midpoint.gui.api.util.WebComponentUtil; import com.evolveum.midpoint.gui.api.util.WebModelServiceUtils; import com.evolveum.midpoint.prism.PrismContext; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.delta.ObjectDelta; import com.evolveum.midpoint.prism.path.ItemPath; import com.evolveum.midpoint.prism.query.AndFilter; import com.evolveum.midpoint.prism.query.EqualFilter; import com.evolveum.midpoint.prism.query.InOidFilter; import com.evolveum.midpoint.prism.query.NotFilter; import com.evolveum.midpoint.prism.query.ObjectFilter; import com.evolveum.midpoint.prism.query.ObjectQuery; import com.evolveum.midpoint.prism.query.OrFilter; import 
com.evolveum.midpoint.prism.query.RefFilter; import com.evolveum.midpoint.prism.query.TypeFilter; import com.evolveum.midpoint.schema.constants.ObjectTypes; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.util.ObjectTypeUtil; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.task.api.TaskCategory; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.util.logging.LoggingUtils; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.web.component.input.ObjectTypeChoiceRenderer; import com.evolveum.midpoint.web.component.input.QNameChoiceRenderer; import com.evolveum.midpoint.web.page.admin.users.component.AbstractRoleMemberPanel; import com.evolveum.midpoint.web.session.UserProfileStorage.TableId; import com.evolveum.midpoint.xml.ns._public.common.common_3.AssignmentType; import com.evolveum.midpoint.xml.ns._public.common.common_3.FocusType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType; import com.evolveum.midpoint.xml.ns._public.common.common_3.OrgType; import com.evolveum.midpoint.xml.ns._public.common.common_3.RoleType; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; public class RoleMemberPanel extends AbstractRoleMemberPanel<RoleType> { private static final long serialVersionUID = 1L; private static final Trace LOGGER = TraceManager.getTrace(RoleMemberPanel.class); private static String ID_OBJECT_TYPE = "type"; private static String ID_TENANT = "tenant"; private static String ID_PROJECT = "project"; public RoleMemberPanel(String id, IModel<RoleType> model, PageBase pageBase) { super(id, TableId.ROLE_MEMEBER_PANEL, model, pageBase); } private PrismContext getPrismContext() { return getPageBase().getPrismContext(); } private <V> DropDownChoice<V> createDropDown(String id, IModel<V> defaultModel, final List<V> values, 
IChoiceRenderer<V> renderer) { DropDownChoice<V> listSelect = new DropDownChoice<V>(id, defaultModel, new AbstractReadOnlyModel<List<V>>() { private static final long serialVersionUID = 1L; @Override public List<V> getObject() { return values; } }, renderer); listSelect.add(new OnChangeAjaxBehavior() { private static final long serialVersionUID = 1L; @Override protected void onUpdate(AjaxRequestTarget target) { refreshTable(target); } }); return listSelect; } protected void refreshTable(AjaxRequestTarget target) { DropDownChoice<QName> typeChoice = (DropDownChoice) get(createComponentPath(ID_OBJECT_TYPE)); QName type = typeChoice.getModelObject(); getMemberTable().clearCache(); getMemberTable().refreshTable((Class<FocusType>) WebComponentUtil.qnameToClass(getPrismContext(), type), target); } private List<OrgType> createTenantList() { ObjectQuery query; try { query = ObjectQuery.createObjectQuery( EqualFilter.createEqual(OrgType.F_TENANT, OrgType.class, getPrismContext(), true)); List<PrismObject<OrgType>> orgs = WebModelServiceUtils.searchObjects(OrgType.class, query, new OperationResult("Tenant search"), getPageBase()); List<OrgType> orgTypes = new ArrayList<>(); for (PrismObject<OrgType> org : orgs) { orgTypes.add(org.asObjectable()); } return orgTypes; } catch (SchemaException e) { error(getString("pageUsers.message.queryError") + " " + e.getMessage()); return null; } } private List<OrgType> createProjectList() { ObjectQuery query; try { query = ObjectQuery.createObjectQuery(OrFilter.createOr( EqualFilter.createEqual(OrgType.F_TENANT, OrgType.class, getPrismContext(), true), EqualFilter.createEqual(OrgType.F_TENANT, OrgType.class, getPrismContext(), null))); List<PrismObject<OrgType>> orgs = WebModelServiceUtils.searchObjects(OrgType.class, query, new OperationResult("Tenant search"), getPageBase()); List<OrgType> orgTypes = new ArrayList<>(); for (PrismObject<OrgType> org : orgs) { orgTypes.add(org.asObjectable()); } return orgTypes; } catch (SchemaException e) 
{ error(getString("pageUsers.message.queryError") + " " + e.getMessage()); return null; } } // private void addFilter(ObjectFilter filter, List<ObjectFilter> conditions, boolean isNot) { // if (isNot) { // ObjectFilter notFilter = NotFilter.createNot(filter); // conditions.add(notFilter); // } else { // conditions.add(filter); // } // } private MainObjectListPanel<FocusType> getMemberTable() { return (MainObjectListPanel<FocusType>) get(createComponentPath(ID_FORM, ID_CONTAINER_MEMBER, ID_MEMBER_TABLE)); } private AssignmentType createAssignmentToModify() throws SchemaException { AssignmentType assignmentToModify = createAssignmentToModify(null); DropDownChoice<OrgType> tenantChoice = (DropDownChoice<OrgType>) get(ID_TENANT); OrgType tenant = tenantChoice.getModelObject(); if (tenant != null) { assignmentToModify.setTenantRef(ObjectTypeUtil.createObjectRef(tenant.getOid(), ObjectTypes.ORG)); } DropDownChoice<OrgType> projectChoice = (DropDownChoice<OrgType>) get(ID_PROJECT); OrgType project = projectChoice.getModelObject(); if (project != null) { assignmentToModify.setOrgRef(ObjectTypeUtil.createObjectRef(project.getOid(), ObjectTypes.ORG)); } return assignmentToModify; } private ObjectQuery getActionQuery(QueryScope scope) { switch (scope) { case ALL: return createMemberQuery(); case SELECTED: return createRecomputeQuery(); } return null; } private ObjectQuery createRecomputeQuery() { Set<String> oids = getFocusOidToRecompute(); ObjectQuery query = ObjectQuery.createObjectQuery(InOidFilter.createInOid(oids)); return query; } private Set<String> getFocusOidToRecompute() { List<FocusType> availableData = getMemberTable().getSelectedObjects(); Set<String> oids = new HashSet<>(); for (FocusType focus : availableData) { oids.add(focus.getOid()); } return oids; } @Override protected void initCustomLayout(Form form) { } @Override protected void initSearch(Form form) { List<QName> allowedTypes = WebComponentUtil.createFocusTypeList(); 
allowedTypes.add(FocusType.COMPLEX_TYPE); DropDownChoice<QName> typeSelect = createDropDown(ID_OBJECT_TYPE, Model.of(FocusType.COMPLEX_TYPE), allowedTypes, new QNameChoiceRenderer()); add(typeSelect); DropDownChoice<OrgType> tenant = createDropDown(ID_TENANT, new Model(), createTenantList(), new ObjectTypeChoiceRenderer<OrgType>()); add(tenant); DropDownChoice<OrgType> project = createDropDown(ID_PROJECT, new Model(), createProjectList(), new ObjectTypeChoiceRenderer<OrgType>()); add(project); } @Override protected void addMembersPerformed(QName type, QName relation, List selected, AjaxRequestTarget target) { Task operationalTask = getPageBase().createSimpleTask(getTaskName("Add", null)); ObjectDelta delta = prepareDelta(type, MemberOperation.ADD, operationalTask.getResult()); executeMemberOperation(operationalTask, type, createQueryForAdd(selected), delta, TaskCategory.EXECUTE_CHANGES, target); } private ObjectDelta prepareDelta(QName type, MemberOperation operation, OperationResult result) { Class classType = WebComponentUtil.qnameToClass(getPrismContext(), type); ObjectDelta delta = null; try { switch (operation) { case ADD: delta = ObjectDelta.createModificationAddContainer(classType, "fakeOid", FocusType.F_ASSIGNMENT, getPrismContext(), createAssignmentToModify()); break; case REMOVE: delta = ObjectDelta.createModificationDeleteContainer(classType, "fakeOid", FocusType.F_ASSIGNMENT, getPrismContext(), createAssignmentToModify()); break; } } catch (SchemaException e) { LoggingUtils.logException(LOGGER, "Failed to prepare delta for operation " + operation.name(), e); result.recordFatalError("Failed to prepare delta for operation " + operation.name(), e); } return delta; } @Override protected void removeMembersPerformed(QueryScope scope, AjaxRequestTarget target) { Task operationalTask = getPageBase().createSimpleTask(getTaskName("Remove", scope)); ObjectDelta delta = prepareDelta(FocusType.COMPLEX_TYPE, MemberOperation.REMOVE, operationalTask.getResult()); 
executeMemberOperation(operationalTask, FocusType.COMPLEX_TYPE, getActionQuery(scope), delta, TaskCategory.EXECUTE_CHANGES, target); } @Override protected void recomputeMembersPerformed(QueryScope scope, AjaxRequestTarget target) { Task operationalTask = getPageBase().createSimpleTask(getTaskName("Recompute", scope)); executeMemberOperation(operationalTask, ObjectType.COMPLEX_TYPE, getActionQuery(scope), null, TaskCategory.RECOMPUTATION, target); } @Override protected ObjectQuery createMemberQuery() { ObjectQuery query = null; String oid = getModelObject().getOid(); List<ObjectFilter> filters = new ArrayList<>(); try { filters.add(RefFilter.createReferenceEqual( new ItemPath(FocusType.F_ASSIGNMENT, AssignmentType.F_TARGET_REF), UserType.class, getPrismContext(), createReference().asReferenceValue())); DropDownChoice<OrgType> tenantChoice = (DropDownChoice) get(createComponentPath(ID_TENANT)); OrgType tenant = tenantChoice.getModelObject(); if (tenant != null) { filters.add(RefFilter.createReferenceEqual( new ItemPath(FocusType.F_ASSIGNMENT, AssignmentType.F_TENANT_REF), UserType.class, getPrismContext(), createReference(tenant).asReferenceValue())); } DropDownChoice<OrgType> projectChoice = (DropDownChoice) get(createComponentPath(ID_PROJECT)); OrgType project = projectChoice.getModelObject(); if (project != null) { filters.add(RefFilter.createReferenceEqual( new ItemPath(FocusType.F_ASSIGNMENT, AssignmentType.F_ORG_REF), UserType.class, getPrismContext(), createReference(project).asReferenceValue())); } query = ObjectQuery.createObjectQuery(AndFilter.createAnd(filters)); if (LOGGER.isTraceEnabled()) { LOGGER.trace("Searching members of role {} with query:\n{}", oid, query.debugDump()); } } catch (SchemaException e) { LoggingUtils.logException(LOGGER, "Couldn't prepare query for org. 
members.", e); } DropDownChoice<QName> objectTypeChoice = (DropDownChoice) get(createComponentPath(ID_OBJECT_TYPE)); QName objectType = objectTypeChoice.getModelObject(); if (objectType == null || FocusType.COMPLEX_TYPE.equals(objectType)) { return query; } return ObjectQuery.createObjectQuery(TypeFilter.createType(objectType, query.getFilter())); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jmeter.gui;

import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Container;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;

import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;

import org.apache.jmeter.gui.action.ActionNames;
import org.apache.jmeter.gui.action.HtmlReportGenerator;
import org.apache.jmeter.gui.util.EscapeDialog;
import org.apache.jmeter.gui.util.JMeterToolBar;
import org.apache.jmeter.gui.util.JSyntaxTextArea;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.gui.ComponentUtil;
import org.fife.ui.rsyntaxtextarea.SyntaxConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Dialog that lets the user pick a results CSV, a user.properties file and an
 * output directory, then generates an HTML report on a background thread and
 * shows progress/errors in a read-only text area.
 */
public class HtmlReportUI implements ActionListener {
    // Action names handled by this UI (registered in the static block below).
    private static Set<String> commands = new HashSet<>();
    private static final Logger LOGGER = LoggerFactory.getLogger(HtmlReportUI.class);

    // Action-command constants for the dialog's buttons.
    private static final String CREATE_REQUEST = "CREATE_REQUEST";
    private static final String BROWSE_CSV = "BROWSE_CSV";
    private static final String BROWSE_USER_PROPERTIES = "BROWSE_USER_PROPERTIES";
    private static final String BROWSE_OUTPUT = "BROWSE_OUTPUT";

    private EscapeDialog messageDialog;

    private JTextField csvFilePathTextField;
    private JTextField userPropertiesFilePathTextField;
    private JTextField outputDirectoryPathTextField;
    private JButton reportLaunchButton;
    private JSyntaxTextArea reportArea;
    private JButton csvFileButton;
    private JButton outputDirectoryButton;
    private JButton userPropertiesFileButton;
    // Remembered across file-chooser invocations so the chooser reopens
    // in the directory the user last visited.
    private String lastJFCDirectory;
    private final String iconSize = JMeterUtils.getPropDefault(JMeterToolBar.TOOLBAR_ICON_SIZE,
            JMeterToolBar.DEFAULT_TOOLBAR_ICON_SIZE);

    private static final String GENERATE_REPORT_LABEL = JMeterUtils.getResString("generate_report_ui.html_report_request");
    private static final String GENERATING_REPORT_LABEL = JMeterUtils.getResString("generate_report_ui.html_report_processing");
    private static final String BROWSE = "browse";

    /** An image which is displayed when a test is running. */
    private static final String IMAGES_PREFIX = "status/";
    // Icons reflecting the report-generation state on the launch button.
    private final ImageIcon runningIcon = JMeterUtils.getImage(IMAGES_PREFIX + iconSize +"/task-recurring.png");// $NON-NLS-1$
    private final ImageIcon inErrorIcon = JMeterUtils.getImage(IMAGES_PREFIX + iconSize +"/dialog-error-5.png");// $NON-NLS-1$
    private final ImageIcon completedIcon = JMeterUtils.getImage(IMAGES_PREFIX + iconSize +"/task-complete.png");// $NON-NLS-1$

    static {
        commands.add(ActionNames.HTML_REPORT);
    }

    public HtmlReportUI() {
        super();
    }

    /** Builds the dialog and shows it centered on screen. */
    public void showInputDialog(JFrame parent) {
        setupInputDialog(parent);
        launchInputDialog();
    }

    private void launchInputDialog() {
        messageDialog.pack();
        ComponentUtil.centerComponentInWindow(messageDialog);
        messageDialog.setVisible(true);
    }

    public void setupInputDialog(JFrame parent) {
        messageDialog = new EscapeDialog(parent, JMeterUtils.getResString("html_report"), false);
        setupContentPane();
    }

    private void setupContentPane() {
        Container contentPane = messageDialog.getContentPane();
        contentPane.setLayout(new BorderLayout());

        // North: file pickers; center: read-only status area; south: launch button.
        contentPane.add(setupFileChooserPanel(), BorderLayout.NORTH);

        reportArea = JSyntaxTextArea.getInstance(10, 60, true);
        reportArea.setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_NONE);
        reportArea.setEditable(false);
        contentPane.add(reportArea, BorderLayout.CENTER);

        contentPane.add(setupButtonPanel(), BorderLayout.SOUTH);
    }

    /** Builds the 3x3 grid of label / path field / browse button rows. */
    private JPanel setupFileChooserPanel() {
        JPanel fileChooserPanel = new JPanel(new GridLayout(3, 3));
        fileChooserPanel.add(new JLabel(JMeterUtils.getResString("generate_report_ui.csv_file")));

        csvFilePathTextField = new JTextField();
        fileChooserPanel.add(csvFilePathTextField);

        this.csvFileButton = new JButton(JMeterUtils.getResString(BROWSE));
        csvFileButton.setActionCommand(BROWSE_CSV);
        csvFileButton.addActionListener(this);
        fileChooserPanel.add(csvFileButton);

        fileChooserPanel.add(new JLabel(JMeterUtils.getResString("generate_report_ui.user_properties_file")));

        userPropertiesFilePathTextField = new JTextField();
        fileChooserPanel.add(userPropertiesFilePathTextField);

        this.userPropertiesFileButton = new JButton(JMeterUtils.getResString(BROWSE));
        userPropertiesFileButton.setActionCommand(BROWSE_USER_PROPERTIES);
        userPropertiesFileButton.addActionListener(this);
        fileChooserPanel.add(userPropertiesFileButton);

        fileChooserPanel.add(new JLabel(JMeterUtils.getResString("generate_report_ui.output_directory")));

        outputDirectoryPathTextField = new JTextField();
        fileChooserPanel.add(outputDirectoryPathTextField);

        this.outputDirectoryButton = new JButton(JMeterUtils.getResString(BROWSE));
        outputDirectoryButton.setActionCommand(BROWSE_OUTPUT);
        outputDirectoryButton.addActionListener(this);
        fileChooserPanel.add(outputDirectoryButton);

        return fileChooserPanel;
    }

    private JPanel setupButtonPanel() {
        JPanel buttonPanel = new JPanel(new GridLayout(1, 1));

        reportLaunchButton = new JButton(GENERATE_REPORT_LABEL);
        reportLaunchButton.setActionCommand(CREATE_REQUEST);
        reportLaunchButton.addActionListener(this);
        buttonPanel.add(reportLaunchButton);
        return buttonPanel;
    }

    /**
     * Runs the (potentially slow) report generation off the EDT, disabling
     * the launch button while it runs and reporting the outcome when done.
     */
    private class ReportGenerationWorker extends SwingWorker<List<String>, String> {
        private JButton reportLaunchButton;

        public ReportGenerationWorker(JButton reportLaunchButton) {
            this.reportLaunchButton = reportLaunchButton;
        }

        @Override
        protected List<String> doInBackground() throws Exception {
            HtmlReportGenerator htmlReportAction = new HtmlReportGenerator(csvFilePathTextField.getText(),
                    userPropertiesFilePathTextField.getText(), outputDirectoryPathTextField.getText());

            // UI mutations must happen on the EDT; block until they are applied
            // so the button state is consistent before the long-running work.
            SwingUtilities.invokeAndWait(() -> {
                reportLaunchButton.setEnabled(false);
                reportLaunchButton.setIcon(runningIcon);
                reportLaunchButton.setText(GENERATING_REPORT_LABEL);
            });

            return htmlReportAction.run();
        }

        @Override
        protected void done() {
            // done() runs on the EDT, so it is safe to touch Swing here.
            try {
                reportLaunchButton.setEnabled(true);
                reportLaunchButton.setText(GENERATE_REPORT_LABEL);
                reportToUser(get());
            } catch (InterruptedException | ExecutionException exception) {
                // NOTE(review): on InterruptedException the interrupt flag is
                // not restored (Thread.currentThread().interrupt()) — confirm
                // whether that is intentional here.
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error("Error during html report generation: {}", exception.getMessage(), exception);
                }
                reportToUser(Arrays.asList(exception.getMessage()));
            }
        }
    }

    // Appends a line to the status area (existing text is preserved).
    private void addTextToReport(String errorMessage) {
        reportArea.setText(reportArea.getText() + errorMessage + "\n");
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        switch (e.getActionCommand()) {
        case CREATE_REQUEST:
            try {
                reportArea.setText(GENERATING_REPORT_LABEL + "\n");
                reportLaunchButton.setIcon(runningIcon);
                new ReportGenerationWorker(reportLaunchButton).execute();
            } catch (Exception exception) {
                if (LOGGER.isErrorEnabled()) {
                    LOGGER.error("Error during html report generation: {}", exception.getMessage(), exception);
                }
            }
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("CSV file path {}\nuser.properties file path: {}\nOutput directory file path: {}",
                        csvFilePathTextField.getText(), userPropertiesFilePathTextField.getText(),
                        outputDirectoryPathTextField.getText());
            }
            break;
        case BROWSE_USER_PROPERTIES:
            userPropertiesFilePathTextField.setText(showFileChooser(userPropertiesFileButton.getParent(),
                    userPropertiesFilePathTextField, false, new String[] { ".properties" }));
            break;
        case BROWSE_CSV:
            csvFilePathTextField.setText(showFileChooser(csvFileButton.getParent(), csvFilePathTextField,
                    false, new String[] { ".jtl", ".csv" }));
            break;
        case BROWSE_OUTPUT:
            // Output must be a directory, so no extension filter applies.
            outputDirectoryPathTextField.setText(
                    showFileChooser(outputDirectoryButton.getParent(), outputDirectoryPathTextField, true, null));
            break;
        default:
            break;
        }
    }

    /**
     * Shows either a success message or the collected errors in the status
     * area, and sets the matching state icon on the launch button.
     */
    void reportToUser(List<String> runErrors) {
        if (runErrors.isEmpty()) {
            addTextToReport(JMeterUtils.getResString(HtmlReportGenerator.HTML_REPORT_SUCCESS));
            reportLaunchButton.setIcon(completedIcon);
        } else {
            addTextToReport(String.join("\n", runErrors));
            reportLaunchButton.setIcon(inErrorIcon);
        }
    }

    /**
     * Show a file chooser to the user
     *
     * @param component
     *            the parent component the dialog is shown over
     * @param locationTextField
     *            the textField that will receive the path
     * @param onlyDirectory
     *            whether or not the file chooser will only display directories
     * @param extensions
     *            File extensions to filter
     * @return the path the user selected or, if the user cancelled the file
     *         chooser, the previous path
     */
    private String showFileChooser(Component component, JTextField locationTextField, boolean onlyDirectory, String[] extensions) {
        JFileChooser jfc = new JFileChooser();
        if (onlyDirectory) {
            jfc.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
        } else {
            jfc.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
        }
        if(extensions != null && extensions.length > 0) {
            JMeterFileFilter currentFilter = new JMeterFileFilter(extensions);
            jfc.addChoosableFileFilter(currentFilter);
            jfc.setAcceptAllFileFilterUsed(true);
            jfc.setFileFilter(currentFilter);
        }
        // Start in the last-used directory, falling back to the working dir.
        if (lastJFCDirectory != null) {
            jfc.setCurrentDirectory(new File(lastJFCDirectory));
        } else {
            String start = System.getProperty("user.dir", ""); //$NON-NLS-1$//$NON-NLS-2$
            if (!start.isEmpty()) {
                jfc.setCurrentDirectory(new File(start));
            }
        }
        int retVal = jfc.showOpenDialog(component);
        if (retVal == JFileChooser.APPROVE_OPTION) {
            lastJFCDirectory = jfc.getCurrentDirectory().getAbsolutePath();
            return jfc.getSelectedFile().getPath();
        } else {
            // Cancelled: keep whatever path was already in the text field.
            return locationTextField.getText();
        }
    }
}
/*
 * ********************************************************************************************************************
 * <p/>
 * BACKENDLESS.COM CONFIDENTIAL
 * <p/>
 * ********************************************************************************************************************
 * <p/>
 * Copyright 2012 BACKENDLESS.COM. All Rights Reserved.
 * <p/>
 * NOTICE: All information contained herein is, and remains the property of Backendless.com and its suppliers,
 * if any. The intellectual and technical concepts contained herein are proprietary to Backendless.com and its
 * suppliers and may be covered by U.S. and Foreign Patents, patents in process, and are protected by trade secret
 * or copyright law. Dissemination of this information or reproduction of this material is strictly forbidden
 * unless prior written permission is obtained from Backendless.com.
 * <p/>
 * ********************************************************************************************************************
 */
package com.backendless.logging;

import com.backendless.Backendless;
import com.backendless.Invoker;
import com.backendless.async.callback.AsyncCallback;
import com.backendless.exceptions.BackendlessFault;
import com.backendless.exceptions.ExceptionMessage;

import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.security.AccessControlException;
import java.util.*;
import java.util.concurrent.*;

/**
 * Singleton buffer that batches log messages and ships them to the Backendless
 * logging service. In CodeRunner mode every message is reported synchronously
 * (batch size 1, no timer thread); otherwise messages are queued and flushed
 * either every {@code timeFrequency} seconds or once {@code numOfMessages}
 * entries have accumulated, whichever comes first.
 */
public class LogBuffer
{
  private static final int NUM_OF_MESSAGES = 100;
  private static final int NUM_OF_MESSAGES_CODERUNNER = 1;
  private static final int TIME_FREQUENCY = 60 * 5; // 5 minutes
  private static final int TIME_FREQUENCY_CODERUNNER = -1;
  private static final String LOGGING_SERVER_ALIAS = "com.backendless.services.logging.LogService";

  // Null in CodeRunner mode — guarded in scheduledTask() and close().
  private ScheduledExecutorService scheduledExecutorService;
  private int numOfMessages;
  private int timeFrequency;
  private Queue<LogMessage> logMessages;
  // Null until a positive time frequency schedules the periodic flush.
  private ScheduledFuture<?> scheduledFuture;

  // Initialization-on-demand holder idiom: thread-safe lazy singleton without locking.
  public static class SingletonHolder
  {
    public static final LogBuffer HOLDER_INSTANCE = new LogBuffer();
  }

  public static LogBuffer getInstance()
  {
    return SingletonHolder.HOLDER_INSTANCE;
  }

  private LogBuffer()
  {
    numOfMessages = Backendless.isCodeRunner() ? NUM_OF_MESSAGES_CODERUNNER : NUM_OF_MESSAGES;
    timeFrequency = Backendless.isCodeRunner() ? TIME_FREQUENCY_CODERUNNER : TIME_FREQUENCY;
    logMessages = new ConcurrentLinkedQueue<>();

    // CodeRunner reports synchronously, so no background flush thread is created there.
    if( !Backendless.isCodeRunner() )
      scheduledExecutorService = Executors.newSingleThreadScheduledExecutor();

    setupTimer();
  }

  /**
   * Changes the reporting policy. At least one threshold must be positive.
   *
   * @param numOfMessages flush when this many messages are queued (ignored if <= 0)
   * @param timeFrequency flush every this many seconds (ignored if <= 0)
   * @throws IllegalArgumentException when both thresholds are non-positive
   */
  public void setLogReportingPolicy( int numOfMessages, int timeFrequency )
  {
    if( numOfMessages <= 0 && timeFrequency <= 0 )
      throw new IllegalArgumentException( ExceptionMessage.INVALID_LOG_POLICY );

    this.numOfMessages = numOfMessages;
    this.timeFrequency = timeFrequency;
    setupTimer();
  }

  /**
   * Sends all currently queued messages as one batch. The queue is drained
   * element-by-element so messages enqueued concurrently are never lost
   * (the previous copy-then-{@code clear()} approach could silently drop them).
   */
  public void flush()
  {
    List<LogMessage> batch = new ArrayList<>();
    LogMessage logMessage;
    while( ( logMessage = logMessages.poll() ) != null )
      batch.add( logMessage );

    if( !batch.isEmpty() )
      reportBatch( batch );
  }

  private void setupTimer()
  {
    if( timeFrequency > 0 )
    {
      scheduledTask();
    }
    else if( scheduledFuture != null )
    {
      // Policy switched to count-only reporting: stop the periodic flush.
      scheduledFuture.cancel( false );
      scheduledFuture = null;
    }
  }

  void enqueue( String logger, Level level, String message, Throwable exception )
  {
    if( numOfMessages == 1 )
    {
      // Batch size 1 means "no buffering": report immediately.
      reportSingleLogMessage( logger, level, message, exception != null ? getStackTrace( exception ) : null );
      return;
    }

    logMessages.add( new LogMessage( logger, level, new Date( System.currentTimeMillis() ), message,
                                     exception != null ? getStackTrace( exception ) : null ) );

    if( numOfMessages > 1 && logMessages.size() >= numOfMessages )
    {
      flush();
    }
  }

  private void scheduledTask()
  {
    if( Backendless.isCodeRunner() )
      return;

    if( !scheduledExecutorService.isShutdown() )
    {
      // Replace any previously scheduled flush with one at the new frequency.
      if( scheduledFuture != null )
        scheduledFuture.cancel( false );

      scheduledFuture = scheduledExecutorService.scheduleAtFixedRate( new Runnable()
      {
        @Override
        public void run()
        {
          flush();
        }
      }, 0, timeFrequency, TimeUnit.SECONDS );
    }
  }

  /**
   * Renders a throwable's stack trace to a string.
   *
   * @param t the throwable, may be {@code null}
   * @return the stack trace text, or {@code null} when {@code t} is null
   */
  private String getStackTrace( Throwable t )
  {
    if( t == null )
      return null;

    try( StringWriter errors = new StringWriter(); PrintWriter s1 = new PrintWriter( errors ) )
    {
      t.printStackTrace( s1 );
      return errors.toString();
    }
    catch( IOException e )
    {
      return null;
    }
  }

  public void reportSingleLogMessage( String logger, Level loglevel, String message, String exception )
  {
    if( Backendless.isCodeRunner() )
    {
      Invoker.invokeSync( LOGGING_SERVER_ALIAS, "log", new Object[] { loglevel.name(), logger, message, exception } );
    }
    else
    {
      // Fire-and-forget: failures to deliver a log line are deliberately ignored.
      Invoker.invokeAsync( LOGGING_SERVER_ALIAS, "log", new Object[] { loglevel.name(), logger, message, exception }, new AsyncCallback<Void>()
      {
        @Override
        public void handleResponse( Void response )
        {
        }

        @Override
        public void handleFault( BackendlessFault fault )
        {
        }
      } );
    }
  }

  public void reportBatch( List<LogMessage> logBatches )
  {
    if( Backendless.isCodeRunner() )
    {
      Invoker.invokeSync( LOGGING_SERVER_ALIAS, "batchLog", new Object[] { logBatches } );
    }
    else
    {
      // Fire-and-forget: failures to deliver a batch are deliberately ignored.
      Invoker.invokeAsync( LOGGING_SERVER_ALIAS, "batchLog", new Object[] { logBatches }, new AsyncCallback<Void>()
      {
        @Override
        public void handleResponse( Void response )
        {
        }

        @Override
        public void handleFault( BackendlessFault fault )
        {
        }
      } );
    }
  }

  /**
   * Stops the periodic flush and shuts the executor down. Null-guarded: in
   * CodeRunner mode no executor exists, and the future only exists when a
   * positive time frequency was configured — the unguarded version threw NPE.
   */
  public void close()
  {
    if( scheduledFuture != null )
      scheduledFuture.cancel( true );

    if( scheduledExecutorService != null )
      scheduledExecutorService.shutdownNow();
  }
}
/**
 * Copyright (c) 2014-2015, Data Geekery GmbH, contact@datageekery.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jooq.lambda;

import org.junit.Test;

import java.util.function.BinaryOperator;
import java.util.function.DoubleBinaryOperator;
import java.util.function.IntBinaryOperator;
import java.util.function.LongBinaryOperator;
import java.util.stream.DoubleStream;
import java.util.stream.IntStream;
import java.util.stream.LongStream;
import java.util.stream.Stream;

import static org.junit.Assert.*;

/**
 * Tests that {@code Unchecked.binaryOperator} (and its int/long/double primitive
 * variants) wraps checked exceptions thrown by the operator: by default into
 * {@code UncheckedException}, or into whatever the supplied custom handler throws.
 *
 * @author Lukas Eder
 */
public class CheckedBinaryOperatorTest {

    @Test
    public void testCheckedBinaryOperator() {
        // Default handler: the checked Exception is wrapped in UncheckedException.
        BinaryOperator<Object> test = Unchecked.binaryOperator(
            (t1, t2) -> {
                throw new Exception(t1 + ":" + t2);
            }
        );

        assertBinaryOperator(test, UncheckedException.class);
    }

    @Test
    public void testCheckedBinaryOperatorWithCustomHandler() {
        // Custom handler: rethrows as IllegalStateException instead.
        BinaryOperator<Object> test = Unchecked.binaryOperator(
            (t1, t2) -> {
                throw new Exception(t1 + ":" + t2);
            },
            e -> {
                throw new IllegalStateException(e);
            }
        );

        assertBinaryOperator(test, IllegalStateException.class);
    }

    @Test
    public void testCheckedIntBinaryOperator() {
        IntBinaryOperator test = Unchecked.intBinaryOperator(
            (i1, i2) -> {
                throw new Exception(i1 + ":" + i2);
            }
        );

        assertIntBinaryOperator(test, UncheckedException.class);
    }

    @Test
    public void testCheckedIntBinaryOperatorWithCustomHandler() {
        IntBinaryOperator test = Unchecked.intBinaryOperator(
            (i1, i2) -> {
                throw new Exception(i1 + ":" + i2);
            },
            e -> {
                throw new IllegalStateException(e);
            }
        );

        assertIntBinaryOperator(test, IllegalStateException.class);
    }

    @Test
    public void testCheckedLongBinaryOperator() {
        LongBinaryOperator test = Unchecked.longBinaryOperator(
            (l1, l2) -> {
                throw new Exception(l1 + ":" + l2);
            }
        );

        assertLongBinaryOperator(test, UncheckedException.class);
    }

    @Test
    public void testCheckedLongBinaryOperatorWithCustomHandler() {
        LongBinaryOperator test = Unchecked.longBinaryOperator(
            (l1, l2) -> {
                throw new Exception(l1 + ":" + l2);
            },
            e -> {
                throw new IllegalStateException(e);
            }
        );

        assertLongBinaryOperator(test, IllegalStateException.class);
    }

    @Test
    public void testCheckedDoubleBinaryOperator() {
        DoubleBinaryOperator test = Unchecked.doubleBinaryOperator(
            (d1, d2) -> {
                throw new Exception(d1 + ":" + d2);
            }
        );

        assertDoubleBinaryOperator(test, UncheckedException.class);
    }

    @Test
    public void testCheckedDoubleBinaryOperatorWithCustomHandler() {
        DoubleBinaryOperator test = Unchecked.doubleBinaryOperator(
            (d1, d2) -> {
                throw new Exception(d1 + ":" + d2);
            },
            e -> {
                throw new IllegalStateException(e);
            }
        );

        assertDoubleBinaryOperator(test, IllegalStateException.class);
    }

    // Asserts the wrapped operator throws `type` both when applied directly
    // and when used as a Stream reduction accumulator.
    private <E extends RuntimeException> void assertBinaryOperator(BinaryOperator<Object> test, Class<E> type) {
        assertNotNull(test);
        try {
            test.apply(null, null);
            fail();
        }
        catch (RuntimeException e) {
            assertException(type, e, "null:null");
        }

        try {
            Stream.of((Object) "a", "b", "c").reduce(test);
        }
        catch (RuntimeException e) {
            assertException(type, e, "a:b");
        }
    }

    // Same as assertBinaryOperator, for the int primitive specialization.
    private <E extends RuntimeException> void assertIntBinaryOperator(IntBinaryOperator test, Class<E> type) {
        assertNotNull(test);
        try {
            test.applyAsInt(0, 0);
            fail();
        }
        catch (RuntimeException e) {
            assertException(type, e, "0:0");
        }

        try {
            IntStream.of(1, 2, 3).reduce(test);
        }
        catch (RuntimeException e) {
            assertException(type, e, "1:2");
        }
    }

    // Same as assertBinaryOperator, for the long primitive specialization.
    private <E extends RuntimeException> void assertLongBinaryOperator(LongBinaryOperator test, Class<E> type) {
        assertNotNull(test);
        try {
            test.applyAsLong(0L, 0L);
            fail();
        }
        catch (RuntimeException e) {
            assertException(type, e, "0:0");
        }

        try {
            LongStream.of(1L, 2L, 3L).reduce(test);
        }
        catch (RuntimeException e) {
            assertException(type, e, "1:2");
        }
    }

    // Same as assertBinaryOperator, for the double primitive specialization.
    private <E extends RuntimeException> void assertDoubleBinaryOperator(DoubleBinaryOperator test, Class<E> type) {
        assertNotNull(test);
        try {
            test.applyAsDouble(0.0, 0.0);
            fail();
        }
        catch (RuntimeException e) {
            assertException(type, e, "0.0:0.0");
        }

        try {
            DoubleStream.of(1.0, 2.0, 3.0).reduce(test);
        }
        catch (RuntimeException e) {
            assertException(type, e, "1.0:2.0");
        }
    }

    // Verifies the wrapper type, that the cause is the original checked Exception,
    // and that the cause's message carries the operands.
    private <E extends RuntimeException> void assertException(Class<E> type, RuntimeException e, String message) {
        assertEquals(type, e.getClass());
        assertEquals(Exception.class, e.getCause().getClass());
        assertEquals(message, e.getCause().getMessage());
    }
}
package io.innofang.camera2demo.camera2_api;

import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import io.innofang.camera2demo.R;

/**
 * Demo activity showing a Camera2 preview on a {@link TextureView} and taking a
 * still JPEG capture to external storage when the button is pressed.
 */
public class AndroidCameraApi extends AppCompatActivity {
    private static final String TAG = "AndroidCameraApi";
    private Button takePictureButton;
    private TextureView textureView;

    // Maps display rotation to the JPEG orientation to request from the camera.
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }

    private String cameraId;
    protected CameraDevice cameraDevice;
    protected CameraCaptureSession cameraCaptureSessions;
    protected CaptureRequest captureRequest;
    protected CaptureRequest.Builder captureRequestBuilder;
    private Size imageDimension;
    private ImageReader imageReader;
    private File file;
    private static final int REQUEST_CAMERA_PERMISSION = 200;
    private boolean mFlashSupported;
    // Handler/thread for camera callbacks so they stay off the UI thread.
    private Handler mBackgroundHandler;
    private HandlerThread mBackgroundThread;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_android_camera_api);
        textureView = (TextureView) findViewById(R.id.texture);
        assert textureView != null;
        textureView.setSurfaceTextureListener(textureListener);
        takePictureButton = (Button) findViewById(R.id.btn_takepicture);
        assert takePictureButton != null;
        takePictureButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                takePicture();
            }
        });
    }

    TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
            // open your camera here
            openCamera();
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
            // Transform you image captured size according to the surface width and height
        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
            return false;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surface) {
        }
    };

    private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            // This is called when the camera is open
            Log.e(TAG, "onOpened");
            cameraDevice = camera;
            createCameraPreview();
        }

        @Override
        public void onDisconnected(CameraDevice camera) {
            cameraDevice.close();
        }

        @Override
        public void onError(CameraDevice camera, int error) {
            cameraDevice.close();
            cameraDevice = null;
        }
    };

    // NOTE(review): unused duplicate of the local captureListener in takePicture();
    // kept for interface compatibility (package-visible field).
    final CameraCaptureSession.CaptureCallback captureCallbackListener = new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
            super.onCaptureCompleted(session, request, result);
            Toast.makeText(AndroidCameraApi.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
            createCameraPreview();
        }
    };

    protected void startBackgroundThread() {
        mBackgroundThread = new HandlerThread("Camera Background");
        mBackgroundThread.start();
        mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
    }

    protected void stopBackgroundThread() {
        mBackgroundThread.quitSafely();
        try {
            mBackgroundThread.join();
            mBackgroundThread = null;
            mBackgroundHandler = null;
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }

    /**
     * Captures a single still JPEG at the largest supported size and writes it
     * to {@code <external storage>/pic.jpg}, then restores the preview.
     */
    protected void takePicture() {
        if (null == cameraDevice) {
            Log.e(TAG, "cameraDevice is null");
            return;
        }
        CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
        try {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
            Size[] jpegSizes = null;
            if (characteristics != null) {
                jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
            }
            // Fall back to 640x480 when the supported JPEG sizes are unavailable.
            int width = 640;
            int height = 480;
            if (jpegSizes != null && 0 < jpegSizes.length) {
                width = jpegSizes[0].getWidth();
                height = jpegSizes[0].getHeight();
            }
            ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
            List<Surface> outputSurfaces = new ArrayList<Surface>(2);
            outputSurfaces.add(reader.getSurface());
            outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
            final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
            captureBuilder.addTarget(reader.getSurface());
            captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
            // Orientation
            int rotation = getWindowManager().getDefaultDisplay().getRotation();
            captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
            final File file = new File(Environment.getExternalStorageDirectory() + "/pic.jpg");
            ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image image = null;
                    try {
                        image = reader.acquireLatestImage();
                        // acquireLatestImage() may return null if no image is
                        // available — the unguarded version NPE'd on getPlanes().
                        if (image == null) {
                            Log.e(TAG, "acquireLatestImage returned null");
                            return;
                        }
                        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                        byte[] bytes = new byte[buffer.capacity()];
                        buffer.get(bytes);
                        save(bytes);
                    } catch (FileNotFoundException e) {
                        e.printStackTrace();
                    } catch (IOException e) {
                        e.printStackTrace();
                    } finally {
                        if (image != null) {
                            image.close();
                        }
                    }
                }

                // Writes the JPEG bytes to the target file.
                private void save(byte[] bytes) throws IOException {
                    OutputStream output = null;
                    try {
                        output = new FileOutputStream(file);
                        output.write(bytes);
                    } finally {
                        if (null != output) {
                            output.close();
                        }
                    }
                }
            };
            reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
            final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
                    super.onCaptureCompleted(session, request, result);
                    Toast.makeText(AndroidCameraApi.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
                    createCameraPreview();
                }
            };
            cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(CameraCaptureSession session) {
                    try {
                        session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onConfigureFailed(CameraCaptureSession session) {
                }
            }, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Creates the preview capture session rendering into the TextureView.
     */
    protected void createCameraPreview() {
        try {
            SurfaceTexture texture = textureView.getSurfaceTexture();
            assert texture != null;
            texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
            Surface surface = new Surface(texture);
            captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            captureRequestBuilder.addTarget(surface);
            cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                    // The camera is already closed
                    if (null == cameraDevice) {
                        return;
                    }
                    // When the session is ready, we start displaying the preview.
                    cameraCaptureSessions = cameraCaptureSession;
                    updatePreview();
                }

                @Override
                public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                    Toast.makeText(AndroidCameraApi.this, "Configuration change", Toast.LENGTH_SHORT).show();
                }
            }, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    /**
     * Opens the first camera reported by the system, requesting the CAMERA and
     * WRITE_EXTERNAL_STORAGE permissions first if they are not granted yet.
     */
    private void openCamera() {
        CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
        Log.e(TAG, "is camera open");
        try {
            cameraId = manager.getCameraIdList()[0];
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
            StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            assert map != null;
            imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
            // Add permission for camera and let user grant the permission
            if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
                    && ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
                ActivityCompat.requestPermissions(AndroidCameraApi.this,
                        new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE},
                        REQUEST_CAMERA_PERMISSION);
                return;
            }
            manager.openCamera(cameraId, stateCallback, null);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
        Log.e(TAG, "openCamera X");
    }

    /**
     * (Re)starts the repeating preview request on the current capture session.
     */
    protected void updatePreview() {
        if (null == cameraDevice) {
            Log.e(TAG, "updatePreview error, return");
            // FIX: the original fell through here and NPE'd on captureRequestBuilder.
            return;
        }
        captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
        try {
            cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
        } catch (CameraAccessException e) {
            e.printStackTrace();
        }
    }

    private void closeCamera() {
        if (null != cameraDevice) {
            cameraDevice.close();
            cameraDevice = null;
        }
        if (null != imageReader) {
            imageReader.close();
            imageReader = null;
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        if (requestCode == REQUEST_CAMERA_PERMISSION) {
            if (grantResults[0] == PackageManager.PERMISSION_DENIED) {
                // close the app
                Toast.makeText(AndroidCameraApi.this, "Sorry!!!, you can't use this app without granting permission", Toast.LENGTH_LONG).show();
                finish();
            }
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        Log.e(TAG, "onResume");
        startBackgroundThread();
        if (textureView.isAvailable()) {
            openCamera();
        } else {
            textureView.setSurfaceTextureListener(textureListener);
        }
    }

    @Override
    protected void onPause() {
        Log.e(TAG, "onPause");
        //closeCamera();
        stopBackgroundThread();
        super.onPause();
    }
}
/*******************************************************************************
 * Copyright (c) 2015, 2016
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *******************************************************************************/
package jsettlers.common.buildings;

import java.util.EnumSet;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;

import jsettlers.common.buildings.jobs.IBuildingJob;
import jsettlers.common.buildings.loader.BuildingFile;
import jsettlers.common.buildings.stacks.ConstructionStack;
import jsettlers.common.buildings.stacks.RelativeStack;
import jsettlers.common.images.ImageLink;
import jsettlers.common.landscape.ELandscapeType;
import jsettlers.common.movable.EMovableType;
import jsettlers.common.position.RelativePoint;

/**
 * This interface defines the main building type. Each constant loads its full
 * configuration (jobs, tiles, stacks, images, ...) from a {@link BuildingFile}
 * named after the constant at enum-initialization time.
 *
 * @author Michael Zangl
 * @author Andreas Eberle
 */
public enum EBuildingType {
	STONECUTTER,
	FORESTER,
	LUMBERJACK,
	SAWMILL,
	COALMINE,
	IRONMINE,
	GOLDMINE,
	GOLDMELT,
	IRONMELT,
	TOOLSMITH,
	WEAPONSMITH,
	FARM,
	PIG_FARM,
	/**
	 * Needs to implement {@link IBuilding.IMill}
	 */
	MILL,
	WATERWORKS,
	SLAUGHTERHOUSE,
	BAKER,
	FISHER,
	WINEGROWER,
	CHARCOAL_BURNER,
	DONKEY_FARM,
	SMALL_LIVINGHOUSE,
	MEDIUM_LIVINGHOUSE,
	BIG_LIVINGHOUSE,
	LOOKOUT_TOWER,
	TOWER,
	BIG_TOWER,
	CASTLE,
	HOSPITAL,
	BARRACK,
	DOCKYARD,
	HARBOR,
	STOCK,
	TEMPLE,
	BIG_TEMPLE,
	MARKET_PLACE;

	/**
	 * A copy of {@link #values()}. Do not modify this array. This is intended for quicker access to this value.
	 */
	public static final EBuildingType[] VALUES = EBuildingType.values();

	/**
	 * The number of buildings in the {@link #VALUES} array.
	 */
	public static final int NUMBER_OF_BUILDINGS = VALUES.length;

	private static final EnumSet<EBuildingType> MILITARY_BUILDINGS = EnumSet.of(TOWER, BIG_TOWER, CASTLE);

	// NOTE(review): currently unused in this file; presumably queried elsewhere — verify before removing.
	private static final EnumSet<EBuildingType> MINE_BUILDINGS = EnumSet.of(GOLDMINE, IRONMINE, COALMINE);

	/**
	 * The ordinal of this type. Yields more performance than using {@link #ordinal()}
	 */
	public final int ordinal;

	private final IBuildingJob startJob;
	private final EMovableType workerType;
	private final RelativePoint doorTile;
	private final RelativePoint[] blockedTiles;
	private final short workRadius;
	private final boolean mine;
	private final ConstructionStack[] constructionStacks;
	private final RelativeStack[] requestStacks;
	private final RelativeStack[] offerStacks;
	private final RelativePoint workCenter;
	private final RelativePoint flag;
	private final RelativeBricklayer[] bricklayers;
	private final byte numberOfConstructionMaterials;
	private final ImageLink guiImage;
	private final ImageLink[] images;
	private final ImageLink[] buildImages;
	private final RelativePoint[] protectedTiles;
	private final RelativePoint[] buildMarks;
	private final EnumSet<ELandscapeType> groundTypes;
	private final short viewDistance;
	private final OccupierPlace[] occupierPlaces;
	private final BuildingAreaBitSet buildingAreaBitSet;

	/**
	 * Constructs an enum object, loading every property from the
	 * {@link BuildingFile} whose name matches this constant's name.
	 */
	EBuildingType() {
		this.ordinal = ordinal();
		BuildingFile file = new BuildingFile(this.toString());
		startJob = file.getStartJob();
		workerType = file.getWorkerType();
		doorTile = file.getDoor();
		blockedTiles = file.getBlockedTiles();
		protectedTiles = file.getProtectedTiles();
		constructionStacks = file.getConstructionRequiredStacks();
		requestStacks = file.getRequestStacks();
		offerStacks = file.getOfferStacks();
		workRadius = file.getWorkradius();
		workCenter = file.getWorkcenter();
		mine = file.isMine();
		flag = file.getFlag();
		bricklayers = file.getBricklayers();
		occupierPlaces = file.getOccupyerPlaces();
		guiImage = file.getGuiImage();
		images = file.getImages();
		buildImages = file.getBuildImages();
		buildMarks = file.getBuildmarks();
		groundTypes = EnumSet.copyOf(file.getGroundtypes());
		viewDistance = file.getViewdistance();
		this.numberOfConstructionMaterials = calculateNumberOfConstructionMaterials();

		this.buildingAreaBitSet = new BuildingAreaBitSet(getBuildingArea());

		if (mine) {
			// Mines use a fixed (1, 1) center inside their area bit set.
			this.buildingAreaBitSet.setCenter((short) 1, (short) 1);
		}
	}

	// Sums the required construction material over all construction stacks.
	private byte calculateNumberOfConstructionMaterials() {
		byte sum = 0;
		for (ConstructionStack stack : getConstructionStacks()) {
			sum += stack.requiredForBuild();
		}
		return sum;
	}

	// The building area is the set of protected tiles.
	public RelativePoint[] getBuildingArea() {
		return protectedTiles;
	}

	/**
	 * Gets the job a worker for this building should start with.
	 *
	 * @return That {@link IBuildingJob}
	 */
	public final IBuildingJob getStartJob() {
		return startJob;
	}

	/**
	 * Gets the type of worker required for the building.
	 *
	 * @return The worker or <code>null</code> if no worker is required.
	 */
	public final EMovableType getWorkerType() {
		return workerType;
	}

	/**
	 * Gets the position of the door for this building.
	 *
	 * @return The door.
	 */
	public final RelativePoint getDoorTile() {
		return doorTile;
	}

	/**
	 * Gets a list of blocked positions.
	 *
	 * @return The list of blocked positions.
	 */
	public final RelativePoint[] getBlockedTiles() {
		return blockedTiles;
	}

	/**
	 * Gets the tiles that are protected by this building. On these tiles, no other buildings may be build.
	 *
	 * @return The tiles as array.
	 */
	public final RelativePoint[] getProtectedTiles() {
		return protectedTiles;
	}

	/**
	 * Gets the images needed to display this building. They are rendered in the order provided.
	 *
	 * @return The images
	 */
	public final ImageLink[] getImages() {
		return images;
	}

	/**
	 * Gets the images needed to display this building while it is built. They are rendered in the order provided.
	 *
	 * @return The images
	 */
	public final ImageLink[] getBuildImages() {
		return buildImages;
	}

	/**
	 * Gets the gui image that is displayed in the building selection dialog.
	 *
	 * @return The image. It may be <code>null</code>
	 */
	public final ImageLink getGuiImage() {
		return guiImage;
	}

	/**
	 * Gets the working radius of the building. If it is 0, the building does not support a working radius.
	 *
	 * @return The radius.
	 */
	public final short getWorkRadius() {
		return workRadius;
	}

	/**
	 * Gets the default work center for the building type.
	 *
	 * @return The default work center position.
	 */
	public final RelativePoint getDefaultWorkcenter() {
		return workCenter;
	}

	/**
	 * Gets the position of the flag for this building. The flag type is determined by the building itself.
	 *
	 * @return The flag position.
	 */
	public final RelativePoint getFlag() {
		return flag;
	}

	/**
	 * Gets the positions where the bricklayers should stand to build the house.
	 *
	 * @return The positions.
	 * @see RelativeBricklayer
	 */
	public final RelativeBricklayer[] getBricklayers() {
		return bricklayers;
	}

	/**
	 * Gets the positions of the build marks (sticks) for this building.
	 *
	 * @return The positions of the marks.
	 */
	public final RelativePoint[] getBuildMarks() {
		return buildMarks;
	}

	/**
	 * Gets the ground types this building can be placed on.
	 *
	 * @return The ground types.
	 */
	public final Set<ELandscapeType> getGroundTypes() {
		return groundTypes;
	}

	/**
	 * Gets the distance the FOW should be set to visible around this building.
	 *
	 * @return The view distance.
	 */
	public final short getViewDistance() {
		return viewDistance;
	}

	/**
	 * Gets the places where occupiers can be in this building.
	 *
	 * @return The places.
	 * @see OccupierPlace
	 */
	public final OccupierPlace[] getOccupierPlaces() {
		return occupierPlaces;
	}

	/**
	 * Queries a building job with the given name that needs to be accessible from the start job.
	 *
	 * @param jobname
	 *            The name of the job.
	 * @return The job if found.
	 * @throws IllegalArgumentException
	 *             If the name was not found.
	 */
	public final IBuildingJob getJobByName(String jobname) {
		HashSet<String> visited = new HashSet<String>();

		// Breadth-first search over the job graph, tracking visited names
		// because fail/success links may form cycles.
		ConcurrentLinkedQueue<IBuildingJob> queue = new ConcurrentLinkedQueue<IBuildingJob>();
		queue.add(startJob);

		while (!queue.isEmpty()) {
			IBuildingJob job = queue.poll();
			if (visited.contains(job.getName())) {
				continue;
			}
			if (job.getName().equals(jobname)) {
				return job;
			}
			visited.add(job.getName());

			queue.add(job.getNextFailJob());
			queue.add(job.getNextSucessJob());
		}
		throw new IllegalArgumentException("This building has no job with name " + jobname);
	}

	/**
	 * Gets the area for this building.
	 *
	 * @return The building area.
	 */
	public final BuildingAreaBitSet getBuildingAreaBitSet() {
		return buildingAreaBitSet;
	}

	/**
	 * Gets the materials required to build this building and where to place them.
	 *
	 * @return The array of material stacks.
	 */
	public ConstructionStack[] getConstructionStacks() {
		return constructionStacks;
	}

	/**
	 * Get the amount of material required to build this house. Usually the number of stone + planks.
	 *
	 * @return The number of materials required to construct the building.
	 */
	public final byte getNumberOfConstructionMaterials() {
		return numberOfConstructionMaterials;
	}

	/**
	 * Gets the request stacks required to operate this building.
	 *
	 * @return The request stacks.
	 */
	public RelativeStack[] getRequestStacks() {
		return requestStacks;
	}

	/**
	 * Gets the positions where the building should offer materials.
	 *
	 * @return The offer positions.
	 */
	public RelativeStack[] getOfferStacks() {
		return offerStacks;
	}

	/**
	 * Checks if this building is a mine.
	 *
	 * @return <code>true</code> iff this building is a mine.
	 */
	public boolean isMine() {
		return mine;
	}

	// Mines are the only buildings that keep the terrain unflattened.
	public boolean needsFlattenedGround() {
		return !mine;
	}

	/**
	 * Checks if this building is a military building.
	 *
	 * @return <code>true</code> iff this is a military building.
	 */
	public boolean isMilitaryBuilding() {
		return MILITARY_BUILDINGS.contains(this);
	}

	/**
	 * Gets a collection of all military buildings.
	 *
	 * @return The buildings.
	 */
	public static EnumSet<EBuildingType> getMilitaryBuildings() {
		return MILITARY_BUILDINGS;
	}

	public Set<ELandscapeType> getRequiredGroundTypeAt(int relativeX, int relativeY) {
		if (relativeX == 0 && relativeY == 0 && mine) { // if it is a mine and we are in the center
			return ELandscapeType.MOUNTAIN_TYPES;
		} else {
			return groundTypes;
		}
	}
}
package org.gbif.refine.datasets.rls; import org.gbif.api.model.checklistbank.NameUsage; import org.gbif.api.model.checklistbank.NameUsageMatch; import org.gbif.api.model.common.LinneanClassification; import org.gbif.api.service.checklistbank.NameUsageMatchingService; import org.gbif.api.vocabulary.Country; import org.gbif.api.vocabulary.Rank; import org.gbif.common.parsers.CountryParser; import org.gbif.common.parsers.core.ParseResult; import org.gbif.io.CSVReader; import org.gbif.io.CSVReaderFactory; import org.gbif.refine.client.WebserviceClientModule; import org.gbif.refine.utils.FileUtils; import org.gbif.refine.utils.TermUtils; import org.gbif.utils.file.ClosableReportingIterator; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.Writer; import java.util.Arrays; import java.util.Set; import javax.validation.constraints.NotNull; import com.google.common.collect.Sets; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is used to clean, augment, and transform the original Global Reef Fish dataset published by Reef Life * Survey into a DwC sample event, star-formatted dataset consisting of event records (core records) and their * associated occurrences (extension records). The dataset is hosted here http://ipt.ala.org.au/resource?r=global * and indexed here: https://www.gbif.org/dataset/38f06820-08c5-42b2-94f6-47cc3e83a54a The publisher expressed * a desire to republish this dataset biannually hence thousands of new observations get added each month. At least * the publisher (Rick Stuart-Smith) has an account on the ALA IPT and familiar with updating its metadata. To update * its data, however, it needs to be downloaded from RLS' GeoServer here: * http://geoserver-rls.imas.utas.edu.au/geoserver/RLS/ows?service=WFS&version=1.0.0&request=GetFeature&typeName=RLS:M1_DATA&outputFormat=csv * Then input into this script. 
The output are two files (events and occurrences) that can then be uploaded to the IPT
 * and used to republish the dataset.
 */
public class GlobalReefFish {

  private static final Logger LOG = LoggerFactory.getLogger(GlobalReefFish.class);

  // parses verbatim country names into the Country vocabulary (used below to derive the ISO-2 code)
  private static final CountryParser COUNTRY_PARSER = CountryParser.getInstance();

  // read-only GBIF webservice client used to match names against the GBIF Backbone Taxonomy
  private static final NameUsageMatchingService MATCHING_SERVICE =
    WebserviceClientModule.webserviceClientReadOnly().getInstance(NameUsageMatchingService.class);

  /**
   * Entry point: processes the bundled RLS source file and writes the transformed event and
   * occurrence files to a fresh temporary directory, logging that directory's path when done.
   *
   * @param args ignored
   * @throws IOException if reading the source file or writing the output fails
   */
  public static void main(String[] args) throws IOException {
    // directory where files should be written to
    File output = org.gbif.utils.file.FileUtils.createTempDir();
    processGlobalReefFish(output);
    // NOTE(review): this message names M1_DATA-100.csv, but the resource actually read in
    // processGlobalReefFish() is M1_DATA-1.csv — confirm which file name is current
    LOG.info(
      "Processing M1_DATA-100.csv complete! event.txt and occurrence.txt written to: " + output.getAbsolutePath());
  }

  /**
   * Iterates over original source file and does the following:
   * i) cleans it (e.g. maps column header names to DwC term names, matching scientific names to GBIF Backbone
   * Taxonomy)
   * ii) augments it (e.g. adds new columns for sample size, higher taxonomy, etc)
   * iii) transforms it into star format (core file events.txt is list of unique sampling events, and extension file
   * occurrence.txt is a list of all observations derived from all sampling events)
   *
   * @param output directory to write files to
   *
   * @throws IOException if method fails
   */
  public static void processGlobalReefFish(File output) throws IOException {
    // load the original source file to process
    InputStream fis = GlobalReefFish.class.getResourceAsStream("/datasets/rls/M1_DATA-1.csv");
    // create an iterator on the file (UTF-8, comma-separated, double-quote quoted, 1 header row skipped)
    CSVReader reader = CSVReaderFactory.build(fis, "UTF-8", ",", '"', 1);

    // get header row for the new event and occurrence files that this method will output
    String[] header = getHeader();

    // sampling events file
    Writer writerEvents = FileUtils.startEventsFile(output, header);

    // observations file
    Writer writerOccs = FileUtils.startOccurrencesFile(output, header);

    // to capture all unique eventIDs
    Set<String> events = Sets.newHashSet();

    // to capture all names that don't match GBIF Backbone Taxonomy (so each is only logged once)
    Set<String> nonMatchingNames = Sets.newHashSet();

    ClosableReportingIterator<String[]> iter = null;
    int line = 0;
    try {
      iter = reader.iterator();
      while (iter.hasNext()) {
        line++;
        String[] record = iter.next();
        if (record == null || record.length == 0) {
          continue;
        }

        // create new augmented record: original columns 0-19 copied, columns 20-39 filled in below
        String[] modifiedRecord = Arrays.copyOf(record, header.length);

        // add static values (indices correspond to the column names assigned in getHeader())
        modifiedRecord[20] = "500"; // sampleSizeValue
        modifiedRecord[21] = "square_metre"; // sampleSizeUnit
        modifiedRecord[22] = "Reef Life Survey methods"; // samplingProtocol
        modifiedRecord[23] = "http://creativecommons.org/licenses/by/4.0/legalcode"; // license
        modifiedRecord[24] = "Event"; // type
        modifiedRecord[25] = "WGS84"; // geodeticDatum
        modifiedRecord[26] = "individuals"; // organismQuantityType
        modifiedRecord[33] = "RLS"; // rightsHolder
        modifiedRecord[34] = "RLS"; // institutionCode
        modifiedRecord[35] = "RLS"; // ownerInstitutionCode
        modifiedRecord[36] = "HumanObservation"; // basisOfRecord
        modifiedRecord[38] = "Two blocks form a complete transect"; // eventRemarks

        // indicate mean depth is in meters
        String meanDepth = modifiedRecord[11];
        modifiedRecord[11] = meanDepth + " m";

        // indicate which Block (of two) observation was made in
        String blockNumber = modifiedRecord[16];
        modifiedRecord[16] = "Observed in block #" + blockNumber;

        // occurrenceStatus (present vs absent), derived from the abundance count in column 17
        modifiedRecord[27] =
          TermUtils.getOccurrenceStatus(Integer.valueOf(modifiedRecord[17])).toString().toLowerCase();

        // construct higherGeography using formula: Country | Realm | Ecoregion
        String country = modifiedRecord[3];
        String realm = modifiedRecord[5];
        String ecoregion = modifiedRecord[4];
        modifiedRecord[28] = country + " | " + realm + " | " + ecoregion;

        // store individualCount even though it's the same as organismQuantity
        modifiedRecord[37] = modifiedRecord[17]; // value copied from organismQuantity

        // add 2 letter ISO country code; left empty when the verbatim country can't be parsed
        ParseResult<Country> result = COUNTRY_PARSER.parse(modifiedRecord[3]);
        if (result.isSuccessful()) {
          modifiedRecord[39] = result.getPayload().getIso2LetterCode();
        }

        // verify taxonomy
        String name = modifiedRecord[15];
        // for more accurate match, we take higher taxonomy into consideration
        LinneanClassification cl = new NameUsage();
        cl.setPhylum(modifiedRecord[12]);
        cl.setClazz(modifiedRecord[13]);
        cl.setFamily(modifiedRecord[14]);
        // only if binomial, set species (two words and not an "spp." aggregate)
        if (name.split(" ").length == 2 && !name.endsWith("spp.")) {
          cl.setSpecies(name);
        }
        // lowest rank specified
        // TODO: taxonRank is unmapped in 2016-07-20 publication - either fix or remove in future publication
        Rank rank = TermUtils.lowestRank(cl);
        if (rank != null) {
          modifiedRecord[29] = rank.toString();
        }
        // verify name, and add higher taxonomy; non-EXACT matches are only logged, not enriched
        NameUsageMatch match = MATCHING_SERVICE.match(name, rank, cl, false, false);
        if (match.getMatchType().equals(NameUsageMatch.MatchType.EXACT)) {
          modifiedRecord[30] = match.getStatus().toString();
          modifiedRecord[31] = match.getKingdom();
          modifiedRecord[32] = match.getOrder();
        } else {
          if (!nonMatchingNames.contains(name)) {
            LOG.error(
              match.getMatchType().toString() + " match for: " + name + " (with rank " + rank + ") to: " + match
                .getScientificName() + " (with rank " + match.getRank() + ")" + ". See example record with FID: "
                + modifiedRecord[0]);
            nonMatchingNames.add(name);
          }
        }

        // always output line to new occurrences file
        String row = FileUtils.tabRow(modifiedRecord);
        writerOccs.write(row);

        // only output line to events file if event hasn't been included yet
        String eventID = modifiedRecord[2];
        if (!events.contains(eventID)) {
          writerEvents.write(row);
          events.add(eventID);
        }
      }
      LOG.info("Iterated over " + line + " rows.");
      LOG.info("Found " + events.size() + " unique events.");
      LOG.error("Found " + nonMatchingNames.size() + " non-matching names.");
    } catch (Exception e) {
      // some error validating this file, report
      // NOTE(review): this catch sits outside the row loop, so a single bad row aborts the
      // entire run and remaining rows are silently dropped — confirm this is intended
      LOG.error("Exception caught while iterating over file", e);
    } finally {
      if (iter != null) {
        iter.close();
      }
      reader.close();
      writerEvents.close();
      writerOccs.close();
    }
  }

  /**
   * @return array of column names in output files (event.txt, occurrence.txt)
   */
  @NotNull
  private static String[] getHeader() {
    String[] header = new String[40];

    // header 0: FID, e.g. M1_DATA.1
    // RLS definition: Non-stable record-level identifier
    // maps to dwc:occurrenceID
    header[0] = "occurrenceID";

    // header 1: Key, e.g. 1
    // no mapping to DwC
    header[1] = "Key";

    // header 2: SurveyID, e.g. 912344644
    // RLS definition: Identifier of individual 50 m transects
    // maps to dwc:eventID
    header[2] = "eventID";

    // header 3: Country, e.g. Indonesia
    // RLS definition: Country (or largely-autonomous state)
    // maps to dwc:country
    header[3] = "country";

    // header 4: Ecoregion, e.g. Lesser Sunda
    // RLS definition: Location within the Marine Ecoregions of the World provided in Spalding et al.13.
    // no direct mapping to DwC, but used to construct dwc:higherGeography
    header[4] = "Ecoregion";

    // header 5: Realm, e.g. Central Indo-Pacific
    // RLS definition: Biogeographic realm as classified in the Marine Ecoregions of the World13
    // no direct mapping to DwC, used to construct dwc:higherGeography
    header[5] = "Realm";

    // header 6: SiteCode, e.g. BALI2
    // RLS definition: Identifier of unique geographical coordinates
    // maps to dwc:locationID
    header[6] = "locationID";

    // header 7: Site, e.g. Paradise House reef
    // RLS definition: Descriptive name of the site
    // maps to dwc:locality
    header[7] = "locality";

    // header 8: SiteLat, e.g. -8.2773
    // RLS definition: Latitude of site (WGS84)
    // maps to dwc:decimalLatitude
    header[8] = "decimalLatitude";

    // header 9: SiteLong, e.g. 115.5945
    // RLS definition: Longitude of site (WGS84)
    // maps to dwc:decimalLongitude
    header[9] = "decimalLongitude";

    // header 10: SurveyDate, e.g. 2014-10-26T00:00:00
    // RLS definition: Date of survey
    // maps to dwc:eventDate
    header[10] = "eventDate";

    // header 11: Depth, e.g. 4 metres
    // RLS definition: Mean depth of transect line as recorded on dive computer (note: this does not account for tide or deviations from the mean value as a consequence of imperfect tracking of the depth contour along the bottom))
    // maps to dwc:verbatimDepth
    header[11] = "verbatimDepth";

    // header 12: Phylum
    // RLS definition: Taxonomic Phylum
    // maps to dwc:phylum
    header[12] = "phylum";

    // header 13: Class
    // RLS definition: Taxonomic Class
    // maps to dwc:class
    header[13] = "class";

    // header 14: Family
    // RLS definition: Taxonomic Family
    // maps to dwc:family
    header[14] = "family";

    // header 15: Taxon
    // RLS definition: Species name, corrected for recent taxonomic changes and grouping of records not at species level)
    // maps to dwc:scientificName
    header[15] = "scientificName";

    // header 16: Block
    // RLS definition: Identifies which 5 m wide block (of two) within each complete transect (surveyID) - Values = 1 (block on deeper/offshore side of transect line) , 2 (block on shallower/inshore side))
    // maps to dwc:occurrenceRemarks (preceded with Block + n)
    header[16] = "occurrenceRemarks";

    // header 17: Total
    // RLS definition: Total abundance for record on that block, transect, site, date combination
    // maps to dwc:organismQuantity (must pair with dwc:organismQuantityType)
    header[17] = "organismQuantity";

    // header 18: Diver
    // RLS definition: Initials of the diver who collected the datum
    // maps to dwc:recordedBy
    header[18] = "recordedBy";

    // header 19: geom
    // RLS definition: WKT POINT
    // maps to dwc:footprintWKT
    header[19] = "footprintWKT";

    // additional DwC columns, populated per-row in processGlobalReefFish()
    header[20] = "sampleSizeValue";
    header[21] = "sampleSizeUnit";
    header[22] = "samplingProtocol";
    header[23] = "license";
    header[24] = "type";
    header[25] = "geodeticDatum";
    header[26] = "organismQuantityType";
    header[27] = "occurrenceStatus";
    header[28] = "higherGeography";
    header[29] = "taxonRank";
    header[30] = "taxonomicStatus";
    header[31] = "kingdom";
    header[32] = "order";
    header[33] = "rightsHolder";
    header[34] = "institutionCode";
    header[35] = "ownerInstitutionCode";
    header[36] = "basisOfRecord";
    header[37] = "individualCount";
    header[38] = "eventRemarks";
    header[39] = "countryCode";

    // TODO add info about MEOW Marine Ecosystems of the World classification
    return header;
  }
}
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.indices.stats;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Response for the indices-stats action: carries raw per-shard statistics and lazily derives
 * per-index and cluster-wide ("_all") aggregate views from them on demand.
 */
public class IndicesStats extends BroadcastOperationResponse implements ToXContent {

    // the raw per-shard statistics; everything else in this class is derived from this array
    private ShardStats[] shards;

    IndicesStats() {
    }

    // NOTE(review): clusterState is accepted but never used in this constructor — confirm
    // whether it can be dropped from the signature
    IndicesStats(ShardStats[] shards, ClusterState clusterState, int totalShards, int successfulShards, int failedShards, List<ShardOperationFailedException> shardFailures) {
        super(totalShards, successfulShards, failedShards, shardFailures);
        this.shards = shards;
    }

    /**
     * The raw statistics, one entry per shard copy.
     */
    public ShardStats[] shards() {
        return this.shards;
    }

    /**
     * Bean-style alias of {@link #shards()}.
     */
    public ShardStats[] getShards() {
        return this.shards;
    }

    /**
     * The shard stats at the given array position.
     */
    public ShardStats getAt(int position) {
        return shards[position];
    }

    /**
     * The aggregated stats for a single index, or <code>null</code> if the index is unknown.
     */
    public IndexStats index(String index) {
        return indices().get(index);
    }

    /**
     * Bean-style alias of {@link #indices()}.
     */
    public Map<String, IndexStats> getIndices() {
        return indices();
    }

    // lazily computed per-index aggregation of the shard stats (see indices())
    private Map<String, IndexStats> indicesStats;

    /**
     * Groups the shard stats by index name, computing the map on first call and caching it.
     * NOTE(review): the lazy initialization here (and in total()/primaries()) is not
     * synchronized — appears to assume single-threaded access to the response; confirm.
     */
    public Map<String, IndexStats> indices() {
        if (indicesStats != null) {
            return indicesStats;
        }
        Map<String, IndexStats> indicesStats = Maps.newHashMap();

        // collect the distinct index names first, then gather each index's shards
        Set<String> indices = Sets.newHashSet();
        for (ShardStats shard : shards) {
            indices.add(shard.index());
        }
        for (String index : indices) {
            List<ShardStats> shards = Lists.newArrayList();
            for (ShardStats shard : this.shards) {
                if (shard.shardRouting().index().equals(index)) {
                    shards.add(shard);
                }
            }
            indicesStats.put(index, new IndexStats(index, shards.toArray(new ShardStats[shards.size()])));
        }
        this.indicesStats = indicesStats;
        return indicesStats;
    }

    // lazily computed sum over all shard copies (primaries and replicas)
    private CommonStats total = null;

    /**
     * Bean-style alias of {@link #total()}.
     */
    public CommonStats getTotal() {
        return total();
    }

    /**
     * Sums the stats of every shard copy, computing the result on first call and caching it.
     */
    public CommonStats total() {
        if (total != null) {
            return total;
        }
        CommonStats stats = new CommonStats();
        for (ShardStats shard : shards) {
            stats.add(shard.stats());
        }
        total = stats;
        return stats;
    }

    // lazily computed sum over primary shard copies only
    private CommonStats primary = null;

    /**
     * Bean-style alias of {@link #primaries()}.
     */
    public CommonStats getPrimaries() {
        return primaries();
    }

    /**
     * Sums the stats of the primary shard copies only, computing the result on first call
     * and caching it.
     */
    public CommonStats primaries() {
        if (primary != null) {
            return primary;
        }
        CommonStats stats = new CommonStats();
        for (ShardStats shard : shards) {
            if (shard.shardRouting().primary()) {
                stats.add(shard.stats());
            }
        }
        primary = stats;
        return stats;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        // wire format: vint count followed by that many serialized ShardStats
        shards = new ShardStats[in.readVInt()];
        for (int i = 0; i < shards.length; i++) {
            shards[i] = ShardStats.readShardStats(in);
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        // must mirror the order read in readFrom()
        out.writeVInt(shards.length);
        for (ShardStats shard : shards) {
            shard.writeTo(out);
        }
    }

    /**
     * Renders "_all" (primaries + total) followed by a per-index breakdown; per-shard detail
     * is only emitted when the request parameter "level" equals "shards".
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject("_all");

        builder.startObject("primaries");
        primaries().toXContent(builder, params);
        builder.endObject();

        builder.startObject("total");
        total().toXContent(builder, params);
        builder.endObject();

        builder.startObject(Fields.INDICES);
        for (IndexStats indexStats : indices().values()) {
            builder.startObject(indexStats.index(), XContentBuilder.FieldCaseConversion.NONE);

            builder.startObject("primaries");
            indexStats.primaries().toXContent(builder, params);
            builder.endObject();

            builder.startObject("total");
            indexStats.total().toXContent(builder, params);
            builder.endObject();

            if ("shards".equalsIgnoreCase(params.param("level", null))) {
                builder.startObject(Fields.SHARDS);
                for (IndexShardStats indexShardStats : indexStats) {
                    builder.startArray(Integer.toString(indexShardStats.shardId().id()));
                    for (ShardStats shardStats : indexShardStats) {
                        builder.startObject();
                        builder.startObject(Fields.ROUTING)
                                .field(Fields.STATE, shardStats.shardRouting().state())
                                .field(Fields.PRIMARY, shardStats.shardRouting().primary())
                                .field(Fields.NODE, shardStats.shardRouting().currentNodeId())
                                .field(Fields.RELOCATING_NODE, shardStats.shardRouting().relocatingNodeId())
                                .endObject();
                        shardStats.stats().toXContent(builder, params);
                        builder.endObject();
                    }
                    builder.endArray();
                }
                builder.endObject();
            }
            builder.endObject();
        }
        builder.endObject();

        builder.endObject();
        return builder;
    }

    // XContent field-name constants used by toXContent()
    static final class Fields {
        static final XContentBuilderString INDICES = new XContentBuilderString("indices");
        static final XContentBuilderString SHARDS = new XContentBuilderString("shards");
        static final XContentBuilderString ROUTING = new XContentBuilderString("routing");
        static final XContentBuilderString STATE = new XContentBuilderString("state");
        static final XContentBuilderString PRIMARY = new XContentBuilderString("primary");
        static final XContentBuilderString NODE = new XContentBuilderString("node");
        static final XContentBuilderString RELOCATING_NODE = new XContentBuilderString("relocating_node");
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.hbase.index.write;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.phoenix.hbase.index.IndexTableName;
import org.apache.phoenix.hbase.index.StubAbortable;
import org.apache.phoenix.hbase.index.exception.IndexWriteException;
import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
import org.apache.phoenix.util.ScanUtil;
import org.junit.Rule;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
 * Unit tests for {@link IndexWriter}: default committer/failure-policy lookup, synchronous
 * completion of batched index writes, and interruption behavior on shutdown. All HBase
 * interaction is mocked; tables are served from a FakeTableFactory (defined elsewhere in
 * this test package).
 */
public class TestIndexWriter {
  private static final Log LOG = LogFactory.getLog(TestIndexWriter.class);
  @Rule
  public IndexTableName testName = new IndexTableName();
  private final byte[] row = Bytes.toBytes("row");

  /**
   * The writer should resolve a non-null default committer from a bare configuration.
   */
  @Test
  public void getDefaultWriter() throws Exception {
    Configuration conf = new Configuration(false);
    RegionCoprocessorEnvironment env = Mockito.mock(RegionCoprocessorEnvironment.class);
    Mockito.when(env.getConfiguration()).thenReturn(conf);
    assertNotNull(IndexWriter.getCommitter(env));
  }

  /**
   * The writer should resolve a non-null default failure policy from a bare configuration.
   */
  @Test
  public void getDefaultFailurePolicy() throws Exception {
    Configuration conf = new Configuration(false);
    RegionCoprocessorEnvironment env = Mockito.mock(RegionCoprocessorEnvironment.class);
    Region region = Mockito.mock(Region.class);
    Mockito.when(env.getRegion()).thenReturn(region);
    Mockito.when(env.getConfiguration()).thenReturn(conf);
    Mockito.when(region.getTableDescriptor()).thenReturn(
        TableDescriptorBuilder.newBuilder(TableName.valueOf("dummy")).build());
    assertNotNull(IndexWriter.getFailurePolicy(env));
  }

  /**
   * With the move to using a pool of threads to write, we need to ensure that we still block until
   * all index writes for a mutation/batch are completed.
   * @throws Exception on failure
   */
  @Test
  public void testSynchronouslyCompletesAllWrites() throws Exception {
    LOG.info("Starting " + testName.getTableNameString());
    LOG.info("Current thread is interrupted: " + Thread.interrupted());
    // mock out the coprocessor environment the writer reads its collaborators from
    Abortable abort = new StubAbortable();
    Stoppable stop = Mockito.mock(Stoppable.class);
    RegionCoprocessorEnvironment e = Mockito.mock(RegionCoprocessorEnvironment.class);
    Configuration conf = new Configuration();
    Mockito.when(e.getConfiguration()).thenReturn(conf);
    Mockito.when(e.getSharedData()).thenReturn(new ConcurrentHashMap<String,Object>());
    Region mockRegion = Mockito.mock(Region.class);
    Mockito.when(e.getRegion()).thenReturn(mockRegion);
    TableDescriptor mockTableDesc = Mockito.mock(TableDescriptor.class);
    Mockito.when(mockRegion.getTableDescriptor()).thenReturn(mockTableDesc);
    TableName mockTN = TableName.valueOf("test");
    Mockito.when(mockTableDesc.getTableName()).thenReturn(mockTN);
    Connection mockConnection = Mockito.mock(Connection.class);
    Mockito.when(e.getConnection()).thenReturn(mockConnection);
    ExecutorService exec = Executors.newFixedThreadPool(1);
    Map<ImmutableBytesPtr, Table> tables = new HashMap<ImmutableBytesPtr, Table>();
    FakeTableFactory factory = new FakeTableFactory(tables);

    // a single index update against a single table
    byte[] tableName = this.testName.getTableName();
    Put m = new Put(row);
    m.addColumn(Bytes.toBytes("family"), Bytes.toBytes("qual"), null);
    Collection<Pair<Mutation, byte[]>> indexUpdates = Arrays.asList(new Pair<Mutation, byte[]>(m, tableName));

    // the mocked table records (via completed[0]) that its batch was actually invoked
    Table table = Mockito.mock(Table.class);
    final boolean[] completed = new boolean[] { false };
    Mockito.doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
        // just keep track that it was called
        completed[0] = true;
        return null;
      }
    }).when(table).batch(Mockito.anyList(), Mockito.any());
    Mockito.when(table.getName()).thenReturn(TableName.valueOf(testName.getTableName()));
    // add the table to the set of tables, so its returned to the writer
    tables.put(new ImmutableBytesPtr(tableName), table);

    // setup the writer and failure policy
    TrackingParallelWriterIndexCommitter committer =
        new TrackingParallelWriterIndexCommitter(VersionInfo.getVersion());
    committer.setup(factory, exec, stop, e);
    KillServerOnFailurePolicy policy = new KillServerOnFailurePolicy();
    policy.setup(stop, e);
    IndexWriter writer = new IndexWriter(committer, policy);
    writer.write(indexUpdates, ScanUtil.UNKNOWN_CLIENT_VERSION);
    // write() must not return before the pooled batch ran
    assertTrue("Writer returned before the table batch completed! Likely a race condition tripped",
      completed[0]);
    writer.stop(this.testName.getTableNameString() + " finished");
    assertTrue("Factory didn't get shutdown after writer#stop!", factory.shutdown);
    assertTrue("ExectorService isn't terminated after writer#stop!", exec.isShutdown());
  }

  /**
   * Test that if we get an interruption to to the thread while doing a batch (e.g. via shutdown),
   * that we correctly end the task
   * @throws Exception on failure
   */
  @Test
  public void testShutdownInterruptsAsExpected() throws Exception {
    Stoppable stop = Mockito.mock(Stoppable.class);
    Abortable abort = new StubAbortable();
    // single thread factory so the older request gets queued
    ExecutorService exec = Executors.newFixedThreadPool(1);
    Map<ImmutableBytesPtr, Table> tables = new HashMap<ImmutableBytesPtr, Table>();
    RegionCoprocessorEnvironment e = Mockito.mock(RegionCoprocessorEnvironment.class);
    Configuration conf = new Configuration();
    Mockito.when(e.getConfiguration()).thenReturn(conf);
    Mockito.when(e.getSharedData()).thenReturn(new ConcurrentHashMap<String,Object>());
    Region mockRegion = Mockito.mock(Region.class);
    Mockito.when(e.getRegion()).thenReturn(mockRegion);
    TableDescriptor mockTableDesc = Mockito.mock(TableDescriptor.class);
    Mockito.when(mockRegion.getTableDescriptor()).thenReturn(mockTableDesc);
    Mockito.when(mockTableDesc.getTableName()).thenReturn(TableName.valueOf("test"));
    Connection mockConnection = Mockito.mock(Connection.class);
    Mockito.when(e.getConnection()).thenReturn(mockConnection);
    FakeTableFactory factory = new FakeTableFactory(tables);

    byte[] tableName = this.testName.getTableName();
    Table table = Mockito.mock(Table.class);
    Mockito.when(table.getName()).thenReturn(TableName.valueOf(tableName));
    // signals the mocked batch has begun, so the test knows when it is safe to stop the writer
    final CountDownLatch writeStartedLatch = new CountDownLatch(1);
    // latch never gets counted down, so we wait forever
    final CountDownLatch waitOnAbortedLatch = new CountDownLatch(1);
    Mockito.doAnswer(new Answer<Void>() {
      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
        LOG.info("Write started");
        writeStartedLatch.countDown();
        // when we interrupt the thread for shutdown, we should see this throw an interrupt too
        try {
          waitOnAbortedLatch.await();
        } catch (InterruptedException e) {
          LOG.info("Correctly interrupted while writing!");
          throw e;
        }
        return null;
      }
    }).when(table).batch(Mockito.anyListOf(Row.class), Mockito.any());
    // add the tables to the set of tables, so its returned to the writer
    tables.put(new ImmutableBytesPtr(tableName), table);

    // update a single table
    Put m = new Put(row);
    m.addColumn(Bytes.toBytes("family"), Bytes.toBytes("qual"), null);
    final List<Pair<Mutation, byte[]>> indexUpdates = new ArrayList<Pair<Mutation, byte[]>>();
    indexUpdates.add(new Pair<Mutation, byte[]>(m, tableName));

    // setup the writer
    TrackingParallelWriterIndexCommitter committer =
        new TrackingParallelWriterIndexCommitter(VersionInfo.getVersion());
    committer.setup(factory, exec, stop, e);
    KillServerOnFailurePolicy policy = new KillServerOnFailurePolicy();
    policy.setup(stop, e);
    final IndexWriter writer = new IndexWriter(committer, policy);

    // run the blocking write on its own thread so the test thread can issue the stop
    final boolean[] failedWrite = new boolean[] { false };
    Thread primaryWriter = new Thread() {
      @Override
      public void run() {
        try {
          writer.write(indexUpdates, ScanUtil.UNKNOWN_CLIENT_VERSION);
        } catch (IndexWriteException e) {
          failedWrite[0] = true;
        }
      }
    };
    primaryWriter.start();
    // wait for the write to start before intentionally shutdown the pool
    writeStartedLatch.await();
    writer.stop("Shutting down writer for test " + this.testName.getTableNameString());
    primaryWriter.join();
    assertTrue("Writer should have failed because of the stop we issued", failedWrite[0]);
  }
}
package mil.nga.geopackage.tiles.user; import android.content.Context; import android.database.Cursor; import android.database.sqlite.SQLiteException; import android.graphics.Bitmap; import com.j256.ormlite.stmt.PreparedQuery; import com.j256.ormlite.stmt.QueryBuilder; import junit.framework.TestCase; import java.io.IOException; import java.nio.ByteBuffer; import java.sql.SQLException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import mil.nga.geopackage.BoundingBox; import mil.nga.geopackage.GeoPackage; import mil.nga.geopackage.GeoPackageException; import mil.nga.geopackage.contents.Contents; import mil.nga.geopackage.contents.ContentsDao; import mil.nga.geopackage.db.GeoPackageDataType; import mil.nga.geopackage.db.ResultUtils; import mil.nga.geopackage.extension.coverage.CoverageData; import mil.nga.geopackage.io.BitmapConverter; import mil.nga.geopackage.TestConstants; import mil.nga.geopackage.TestUtils; import mil.nga.geopackage.geom.GeoPackageGeometryDataUtils; import mil.nga.geopackage.tiles.TileBoundingBoxUtils; import mil.nga.geopackage.tiles.TileGrid; import mil.nga.geopackage.tiles.matrix.TileMatrix; import mil.nga.geopackage.tiles.matrix.TileMatrixDao; import mil.nga.geopackage.tiles.matrix.TileMatrixKey; import mil.nga.geopackage.tiles.matrixset.TileMatrixSet; import mil.nga.geopackage.tiles.matrixset.TileMatrixSetDao; import mil.nga.geopackage.user.ColumnValue; import mil.nga.proj.ProjectionConstants; import mil.nga.proj.ProjectionFactory; /** * Tiles Utility test methods * * @author osbornb */ public class TileUtils { /** * Test read * * @param geoPackage GeoPackage * @throws SQLException upon error */ public static void testRead(GeoPackage geoPackage) throws SQLException { TileMatrixSetDao tileMatrixSetDao = 
geoPackage.getTileMatrixSetDao(); if (tileMatrixSetDao.isTableExists()) { List<TileMatrixSet> results = tileMatrixSetDao.queryForAll(); results = TestUtils.getRandomList(results, 3); for (TileMatrixSet tileMatrixSet : results) { // Test the get tile DAO methods TileDao dao = geoPackage.getTileDao(tileMatrixSet); TestCase.assertNotNull(dao); dao = geoPackage.getTileDao(tileMatrixSet.getContents()); TestCase.assertNotNull(dao); dao = geoPackage.getTileDao(tileMatrixSet.getTableName()); TestCase.assertNotNull(dao); TestCase.assertNotNull(dao.getDb()); TestCase.assertEquals(tileMatrixSet.getId(), dao .getTileMatrixSet().getId()); TestCase.assertEquals(tileMatrixSet.getTableName(), dao.getTableName()); TestCase.assertFalse(dao.getTileMatrices().isEmpty()); TileTable tileTable = dao.getTable(); String[] columns = tileTable.getColumnNames(); int zoomLevelIndex = tileTable.getZoomLevelColumnIndex(); TestCase.assertTrue(zoomLevelIndex >= 0 && zoomLevelIndex < columns.length); TestCase.assertEquals(TileTable.COLUMN_ZOOM_LEVEL, columns[zoomLevelIndex]); int tileColumnIndex = tileTable.getTileColumnColumnIndex(); TestCase.assertTrue(tileColumnIndex >= 0 && tileColumnIndex < columns.length); TestCase.assertEquals(TileTable.COLUMN_TILE_COLUMN, columns[tileColumnIndex]); int tileRowIndex = tileTable.getTileRowColumnIndex(); TestCase.assertTrue(tileRowIndex >= 0 && tileRowIndex < columns.length); TestCase.assertEquals(TileTable.COLUMN_TILE_ROW, columns[tileRowIndex]); int tileDataIndex = tileTable.getTileDataColumnIndex(); TestCase.assertTrue(tileDataIndex >= 0 && tileDataIndex < columns.length); TestCase.assertEquals(TileTable.COLUMN_TILE_DATA, columns[tileDataIndex]); // Query for all TileCursor cursor = dao.queryForAll(); int count = cursor.getCount(); int manualCount = 0; for(TileRow tileRow: cursor) { validateTileRow(dao, columns, tileRow, manualCount < 5); manualCount++; } TestCase.assertEquals(count, manualCount); cursor.close(); // Manually query for all and compare cursor 
= (TileCursor) dao.getDatabaseConnection().query(dao.getTableName(), null, null, null, null, null, null); count = cursor.getCount(); manualCount = 0; while (cursor.moveToNext()) { manualCount++; } TestCase.assertEquals(count, manualCount); TestCase.assertTrue("No tiles to test", count > 0); // Choose random tile int random = (int) (Math.random() * count); cursor.moveToPosition(random); TileRow tileRow = cursor.getRow(); cursor.close(); // Query by id TileRow queryTileRow = dao.queryForIdRow(tileRow.getId()); TestCase.assertNotNull(queryTileRow); TestCase.assertEquals(tileRow.getId(), queryTileRow.getId()); // Find two non id columns TileColumn column1 = null; TileColumn column2 = null; for (TileColumn column : tileRow.getTable().getColumns()) { if (!column.isPrimaryKey()) { if (column1 == null) { column1 = column; } else { column2 = column; break; } } } // Query for equal if (column1 != null) { Object column1Value = tileRow.getValue(column1.getName()); Class<?> column1ClassType = column1.getDataType() .getClassType(); boolean column1Decimal = column1ClassType == Double.class || column1ClassType == Float.class; ColumnValue column1TileValue; if (column1Decimal) { column1TileValue = new ColumnValue(column1Value, .000001); } else { column1TileValue = new ColumnValue(column1Value); } cursor = dao .queryForEq(column1.getName(), column1TileValue); TestCase.assertTrue(cursor.getCount() > 0); boolean found = false; while (cursor.moveToNext()) { queryTileRow = cursor.getRow(); TestCase.assertEquals(column1Value, queryTileRow.getValue(column1.getName())); if (!found) { found = tileRow.getId() == queryTileRow.getId(); } } TestCase.assertTrue(found); cursor.close(); // Query for field values Map<String, ColumnValue> fieldValues = new HashMap<String, ColumnValue>(); fieldValues.put(column1.getName(), column1TileValue); Object column2Value = null; ColumnValue column2TileValue; if (column2 != null) { column2Value = tileRow.getValue(column2.getName()); Class<?> column2ClassType = 
column2.getDataType() .getClassType(); boolean column2Decimal = column2ClassType == Double.class || column2ClassType == Float.class; if (column2Decimal) { column2TileValue = new ColumnValue(column2Value, .000001); } else { column2TileValue = new ColumnValue(column2Value); } fieldValues.put(column2.getName(), column2TileValue); } cursor = dao.queryForValueFieldValues(fieldValues); TestCase.assertTrue(cursor.getCount() > 0); found = false; while (cursor.moveToNext()) { queryTileRow = cursor.getRow(); TestCase.assertEquals(column1Value, queryTileRow.getValue(column1.getName())); if (column2 != null) { TestCase.assertEquals(column2Value, queryTileRow.getValue(column2.getName())); } if (!found) { found = tileRow.getId() == queryTileRow.getId(); } } TestCase.assertTrue(found); cursor.close(); } String previousColumn = null; for (String column : columns) { long expectedDistinctCount = dao .querySingleTypedResult( "SELECT COUNT(DISTINCT " + column + ") FROM " + dao.getTableName(), null); int distinctCount = dao.count(true, column); TestCase.assertEquals(expectedDistinctCount, distinctCount); if (dao.count(column + " IS NULL") > 0) { distinctCount++; } TileCursor expectedCursor = dao .rawQuery("SELECT DISTINCT " + column + " FROM " + dao.getTableName(), null); int expectedDistinctCursorCount = expectedCursor .getCount(); int expectedDistinctManualCursorCount = 0; while (expectedCursor.moveToNext()) { expectedDistinctManualCursorCount++; } expectedCursor.close(); TestCase.assertEquals(expectedDistinctManualCursorCount, expectedDistinctCursorCount); cursor = dao.query(true, new String[]{column}); TestCase.assertEquals(1, cursor.getColumnCount()); TestCase.assertEquals(expectedDistinctCursorCount, cursor.getCount()); TestCase.assertEquals(distinctCount, cursor.getCount()); cursor.close(); cursor = dao.query(new String[]{column}); TestCase.assertEquals(1, cursor.getColumnCount()); TestCase.assertEquals(count, cursor.getCount()); Set<Object> distinctValues = new HashSet<>(); 
while (cursor.moveToNext()) { Object value = cursor.getValue(column); distinctValues.add(value); } cursor.close(); if (!column .equals(tileTable.getTileDataColumn().getName())) { TestCase.assertEquals(distinctCount, distinctValues.size()); } if (previousColumn != null) { cursor = dao.query(true, new String[]{previousColumn, column}); TestCase.assertEquals(2, cursor.getColumnCount()); distinctCount = cursor.getCount(); if (distinctCount < 0) { distinctCount = 0; while (cursor.moveToNext()) { distinctCount++; } } cursor.close(); cursor = dao .query(new String[]{previousColumn, column}); TestCase.assertEquals(2, cursor.getColumnCount()); TestCase.assertEquals(count, cursor.getCount()); Map<Object, Set<Object>> distinctPairs = new HashMap<>(); while (cursor.moveToNext()) { Object previousValue = cursor .getValue(previousColumn); Object value = cursor.getValue(column); distinctValues = distinctPairs.get(previousValue); if (distinctValues == null) { distinctValues = new HashSet<>(); distinctPairs.put(previousValue, distinctValues); } distinctValues.add(value); } cursor.close(); int distinctPairsCount = 0; for (Set<Object> values : distinctPairs.values()) { distinctPairsCount += values.size(); } if (!previousColumn .equals(tileTable.getTileDataColumn().getName()) && !column.equals(tileTable.getTileDataColumn() .getName())) { TestCase.assertEquals(distinctCount, distinctPairsCount); } } previousColumn = column; } } } } /** * Validate a tile row * * @param dao * @param columns * @param tileRow * @param testBitmap */ private static void validateTileRow(TileDao dao, String[] columns, TileRow tileRow, boolean testBitmap) { TestCase.assertEquals(columns.length, tileRow.columnCount()); for (int i = 0; i < tileRow.columnCount(); i++) { TileColumn column = tileRow.getTable().getColumns().get(i); TestCase.assertEquals(i, column.getIndex()); TestCase.assertEquals(columns[i], tileRow.getColumnName(i)); TestCase.assertEquals(i, tileRow.getColumnIndex(columns[i])); int rowType = 
tileRow.getRowColumnType(i); Object value = tileRow.getValue(i); switch (rowType) { case Cursor.FIELD_TYPE_INTEGER: TestUtils.validateIntegerValue(value, column.getDataType()); break; case Cursor.FIELD_TYPE_FLOAT: TestUtils.validateFloatValue(value, column.getDataType()); break; case Cursor.FIELD_TYPE_STRING: TestCase.assertTrue(value instanceof String); break; case Cursor.FIELD_TYPE_BLOB: TestCase.assertTrue(value instanceof byte[]); break; case Cursor.FIELD_TYPE_NULL: TestCase.assertNull(value); break; } } TestCase.assertTrue(tileRow.getId() >= 0); TestCase.assertTrue(tileRow.getZoomLevel() >= 0); TestCase.assertTrue(tileRow.getTileColumn() >= 0); TestCase.assertTrue(tileRow.getTileRow() >= 0); byte[] tileData = tileRow.getTileData(); TestCase.assertNotNull(tileData); TestCase.assertTrue(tileData.length > 0); TileMatrix tileMatrix = dao.getTileMatrix(tileRow.getZoomLevel()); TestCase.assertNotNull(tileMatrix); if (testBitmap) { Bitmap bitmap = tileRow.getTileDataBitmap(); if (!dao.getTileMatrixSet().getContents().getDataTypeName().equalsIgnoreCase(CoverageData.GRIDDED_COVERAGE)) { TestCase.assertNotNull(bitmap); TestCase.assertEquals(tileMatrix.getTileWidth(), bitmap.getWidth()); TestCase.assertEquals(tileMatrix.getTileHeight(), bitmap.getHeight()); } } } /** * Test update * * @param testContext test context * @param geoPackage GeoPackage * @throws SQLException upon error * @throws IOException upon error */ public static void testUpdate(Context testContext, GeoPackage geoPackage) throws SQLException, IOException { TileMatrixSetDao tileMatrixSetDao = geoPackage.getTileMatrixSetDao(); if (tileMatrixSetDao.isTableExists()) { List<TileMatrixSet> results = tileMatrixSetDao.queryForAll(); results = TestUtils.getRandomList(results, 3); for (TileMatrixSet tileMatrixSet : results) { TileDao dao = geoPackage.getTileDao(tileMatrixSet); testUpdate(testContext, dao); } } } /** * Test updates for the tile table * * @param testContext test context * @param dao tile dao * 
@throws IOException upon error */ public static void testUpdate(Context testContext, TileDao dao) throws IOException { TestCase.assertNotNull(dao); TileCursor cursor = dao.queryForAll(); int count = cursor.getCount(); if (count > 0) { // Choose random tile int random = (int) (Math.random() * count); cursor.moveToPosition(random); String updatedString = null; String updatedLimitedString = null; Boolean updatedBoolean = null; Byte updatedByte = null; Short updatedShort = null; Integer updatedInteger = null; Long updatedLong = null; Float updatedFloat = null; Double updatedDouble = null; byte[] updatedBytes = null; byte[] updatedLimitedBytes = null; Integer updatedBitmapWidth = null; Integer updatedBitmapHeight = null; TileRow originalRow = cursor.getRow(); TileRow tileRow = cursor.getRow(); try { tileRow.setValue(tileRow.getPkColumnIndex(), 9); TestCase.fail("Updated the primary key value"); } catch (GeoPackageException e) { // expected } for (TileColumn tileColumn : dao.getTable().getColumns()) { if (!tileColumn.isPrimaryKey()) { int rowColumnType = tileRow.getRowColumnType(tileColumn .getIndex()); GeoPackageDataType dataType = tileColumn.getDataType(); if (dataType == null) { continue; } switch (dataType) { case TEXT: validateRowColumnType(rowColumnType, ResultUtils.FIELD_TYPE_STRING); if (updatedString == null) { updatedString = UUID.randomUUID().toString(); } if (tileColumn.getMax() != null) { if (updatedLimitedString == null) { if (updatedString.length() > tileColumn .getMax()) { updatedLimitedString = updatedString .substring(0, tileColumn.getMax() .intValue()); } else { updatedLimitedString = updatedString; } } tileRow.setValue(tileColumn.getIndex(), updatedLimitedString); } else { tileRow.setValue(tileColumn.getIndex(), updatedString); } break; case BOOLEAN: validateRowColumnType(rowColumnType, ResultUtils.FIELD_TYPE_INTEGER); if (updatedBoolean == null) { Boolean existingValue = (Boolean) tileRow .getValue(tileColumn.getIndex()); if (existingValue == null) { 
updatedBoolean = true; } else { updatedBoolean = !existingValue; } } tileRow.setValue(tileColumn.getIndex(), updatedBoolean); break; case TINYINT: validateRowColumnType(rowColumnType, ResultUtils.FIELD_TYPE_INTEGER); if (updatedByte == null) { updatedByte = (byte) (((int) (Math.random() * (Byte.MAX_VALUE + 1))) * (Math .random() < .5 ? 1 : -1)); } tileRow.setValue(tileColumn.getIndex(), updatedByte); break; case SMALLINT: validateRowColumnType(rowColumnType, ResultUtils.FIELD_TYPE_INTEGER); if (updatedShort == null) { updatedShort = (short) (((int) (Math.random() * (Short.MAX_VALUE + 1))) * (Math .random() < .5 ? 1 : -1)); } tileRow.setValue(tileColumn.getIndex(), updatedShort); break; case MEDIUMINT: validateRowColumnType(rowColumnType, ResultUtils.FIELD_TYPE_INTEGER); if (updatedInteger == null) { updatedInteger = (int) (((int) (Math.random() * (Integer.MAX_VALUE + 1))) * (Math .random() < .5 ? 1 : -1)); } tileRow.setValue(tileColumn.getIndex(), updatedInteger); break; case INT: case INTEGER: validateRowColumnType(rowColumnType, ResultUtils.FIELD_TYPE_INTEGER); if (updatedLong == null) { updatedLong = (long) (((int) (Math.random() * (Long.MAX_VALUE + 1))) * (Math .random() < .5 ? 
1 : -1)); } tileRow.setValue(tileColumn.getIndex(), updatedLong); break; case FLOAT: validateRowColumnType(rowColumnType, ResultUtils.FIELD_TYPE_FLOAT); if (updatedFloat == null) { updatedFloat = (float) Math.random() * Float.MAX_VALUE; } tileRow.setValue(tileColumn.getIndex(), updatedFloat); break; case DOUBLE: case REAL: validateRowColumnType(rowColumnType, ResultUtils.FIELD_TYPE_FLOAT); if (updatedDouble == null) { updatedDouble = Math.random() * Double.MAX_VALUE; } tileRow.setValue(tileColumn.getIndex(), updatedDouble); break; case BLOB: validateRowColumnType(rowColumnType, ResultUtils.FIELD_TYPE_BLOB); if (updatedBytes == null) { updatedBytes = TestUtils.getAssetFileBytes( testContext, TestConstants.TILE_FILE_NAME); Bitmap bitmap = BitmapConverter .toBitmap(updatedBytes); updatedBitmapWidth = bitmap.getWidth(); updatedBitmapHeight = bitmap.getHeight(); } if (tileColumn.getMax() != null) { if (updatedLimitedBytes == null) { if (updatedBytes.length > tileColumn.getMax()) { updatedLimitedBytes = new byte[tileColumn .getMax().intValue()]; ByteBuffer.wrap(updatedBytes, 0, tileColumn.getMax().intValue()) .get(updatedLimitedBytes); } else { updatedLimitedBytes = updatedBytes; } } tileRow.setValue(tileColumn.getIndex(), updatedLimitedBytes); } else { tileRow.setValue(tileColumn.getIndex(), updatedBytes); } break; default: } } } cursor.close(); TestCase.assertEquals(1, dao.update(tileRow)); long id = tileRow.getId(); TileRow readRow = dao.queryForIdRow(id); TestCase.assertNotNull(readRow); TestCase.assertEquals(originalRow.getId(), readRow.getId()); for (String readColumnName : readRow.getColumnNames()) { TileColumn readTileColumn = readRow.getColumn(readColumnName); if (!readTileColumn.isPrimaryKey()) { switch (readRow.getRowColumnType(readColumnName)) { case ResultUtils.FIELD_TYPE_STRING: if (readTileColumn.getMax() != null) { TestCase.assertEquals(updatedLimitedString, readRow.getValue(readTileColumn.getIndex())); } else { TestCase.assertEquals(updatedString, 
readRow.getValue(readTileColumn.getIndex())); } break; case ResultUtils.FIELD_TYPE_INTEGER: switch (readTileColumn.getDataType()) { case BOOLEAN: TestCase.assertEquals(updatedBoolean, readRow.getValue(readTileColumn.getIndex())); break; case TINYINT: TestCase.assertEquals(updatedByte, readRow.getValue(readTileColumn.getIndex())); break; case SMALLINT: TestCase.assertEquals(updatedShort, readRow.getValue(readTileColumn.getIndex())); break; case MEDIUMINT: TestCase.assertEquals(updatedInteger, readRow.getValue(readTileColumn.getIndex())); break; case INT: case INTEGER: TestCase.assertEquals(updatedLong, readRow.getValue(readTileColumn.getIndex())); break; default: TestCase.fail("Unexpected integer type: " + readTileColumn.getDataType()); } break; case ResultUtils.FIELD_TYPE_FLOAT: switch (readTileColumn.getDataType()) { case FLOAT: TestCase.assertEquals(updatedFloat, readRow.getValue(readTileColumn.getIndex())); break; case DOUBLE: case REAL: TestCase.assertEquals(updatedDouble, readRow.getValue(readTileColumn.getIndex())); break; default: TestCase.fail("Unexpected integer type: " + readTileColumn.getDataType()); } break; case ResultUtils.FIELD_TYPE_BLOB: if (readTileColumn.getMax() != null) { GeoPackageGeometryDataUtils.compareByteArrays( updatedLimitedBytes, (byte[]) readRow.getValue(readTileColumn .getIndex())); } else { byte[] readBytes = (byte[]) readRow .getValue(readTileColumn.getIndex()); GeoPackageGeometryDataUtils.compareByteArrays( updatedBytes, readBytes); Bitmap bitmap = BitmapConverter .toBitmap(readBytes); TestCase.assertTrue(bitmap.getWidth() > 0); TestCase.assertTrue(bitmap.getHeight() > 0); } break; default: } } } // Explicitly validate the bitmap Bitmap bitmap = readRow.getTileDataBitmap(); TestCase.assertNotNull(bitmap); TestCase.assertEquals(updatedBitmapWidth.intValue(), bitmap.getWidth()); TestCase.assertEquals(updatedBitmapHeight.intValue(), bitmap.getHeight()); } cursor.close(); } /** * Validate the row type * * @param rowColumnType row 
column type * @param expectedColumnType expected column type */ private static void validateRowColumnType(int rowColumnType, int expectedColumnType) { if (rowColumnType == ResultUtils.FIELD_TYPE_NULL) { TestCase.fail("Tile columns should all non nullable. Expected Column Type: " + expectedColumnType); } else { TestCase.assertEquals(expectedColumnType, rowColumnType); } } /** * Test create * * @param geoPackage GeoPackage * @throws SQLException upon error */ public static void testCreate(GeoPackage geoPackage) throws SQLException { TileMatrixSetDao tileMatrixSetDao = geoPackage.getTileMatrixSetDao(); if (tileMatrixSetDao.isTableExists()) { List<TileMatrixSet> results = tileMatrixSetDao.queryForAll(); results = TestUtils.getRandomList(results, 3); for (TileMatrixSet tileMatrixSet : results) { TileDao dao = geoPackage.getTileDao(tileMatrixSet); TestCase.assertNotNull(dao); TileCursor cursor = dao.queryForAll(); int count = cursor.getCount(); if (count > 0) { // Choose random tile int random = (int) (Math.random() * count); cursor.moveToPosition(random); TileRow tileRow = cursor.getRow(); cursor.close(); // Find the largest zoom level TileMatrixDao tileMatrixDao = geoPackage.getTileMatrixDao(); QueryBuilder<TileMatrix, TileMatrixKey> qb = tileMatrixDao .queryBuilder(); qb.where().eq(TileMatrix.COLUMN_TABLE_NAME, tileMatrixSet.getTableName()); qb.orderBy(TileMatrix.COLUMN_ZOOM_LEVEL, false); PreparedQuery<TileMatrix> query = qb.prepare(); TileMatrix tileMatrix = tileMatrixDao.queryForFirst(query); long highestZoomLevel = tileMatrix.getZoomLevel(); // Create new row from existing long id = tileRow.getId(); tileRow.resetId(); tileRow.setZoomLevel(highestZoomLevel + 1); long newRowId; try { newRowId = dao.create(tileRow); } catch (SQLiteException e) { if (TestUtils.isFutureSQLiteException(e)) { continue; } else { throw e; } } TestCase.assertEquals(newRowId, tileRow.getId()); // Verify original still exists and new was created tileRow = dao.queryForIdRow(id); 
TestCase.assertNotNull(tileRow); TileRow queryTileRow = dao.queryForIdRow(newRowId); TestCase.assertNotNull(queryTileRow); cursor = dao.queryForAll(); TestCase.assertEquals(count + 1, cursor.getCount()); cursor.close(); // Create new row with copied values from another TileRow newRow = dao.newRow(); for (TileColumn column : dao.getTable().getColumns()) { if (column.isPrimaryKey()) { try { newRow.setValue(column.getName(), 10); TestCase.fail("Set primary key on new row"); } catch (GeoPackageException e) { // Expected } } else { newRow.setValue(column.getName(), tileRow.getValue(column.getName())); } } newRow.setZoomLevel(queryTileRow.getZoomLevel() + 1); long newRowId2; try { newRowId2 = dao.create(newRow); } catch (SQLiteException e) { if (TestUtils.isFutureSQLiteException(e)) { continue; } else { throw e; } } TestCase.assertEquals(newRowId2, newRow.getId()); // Verify new was created TileRow queryTileRow2 = dao.queryForIdRow(newRowId2); TestCase.assertNotNull(queryTileRow2); cursor = dao.queryForAll(); TestCase.assertEquals(count + 2, cursor.getCount()); cursor.close(); // Test copied row TileRow copyRow = queryTileRow2.copy(); for (TileColumn column : dao.getTable().getColumns()) { if (column.getIndex() == queryTileRow2 .getTileDataColumnIndex()) { byte[] tileData1 = queryTileRow2.getTileData(); byte[] tileData2 = copyRow.getTileData(); TestCase.assertNotSame(tileData1, tileData2); GeoPackageGeometryDataUtils.compareByteArrays( tileData1, tileData2); } else { TestCase.assertEquals( queryTileRow2.getValue(column.getName()), copyRow.getValue(column.getName())); } } copyRow.resetId(); copyRow.setZoomLevel(queryTileRow2.getZoomLevel() + 1); long newRowId3 = dao.create(copyRow); TestCase.assertEquals(newRowId3, copyRow.getId()); // Verify new was created TileRow queryTileRow3 = dao.queryForIdRow(newRowId3); TestCase.assertNotNull(queryTileRow3); cursor = dao.queryForAll(); TestCase.assertEquals(count + 3, cursor.getCount()); cursor.close(); for (TileColumn column : 
dao.getTable().getColumns()) { if (column.isPrimaryKey()) { TestCase.assertNotSame(queryTileRow2.getId(), queryTileRow3.getId()); } else if (column.getIndex() == queryTileRow3 .getZoomLevelColumnIndex()) { TestCase.assertEquals(queryTileRow2.getZoomLevel(), queryTileRow3.getZoomLevel() - 1); } else if (column.getIndex() == queryTileRow3 .getTileDataColumnIndex()) { byte[] tileData1 = queryTileRow2.getTileData(); byte[] tileData2 = queryTileRow3.getTileData(); GeoPackageGeometryDataUtils.compareByteArrays( tileData1, tileData2); } else { TestCase.assertEquals( queryTileRow2.getValue(column.getName()), queryTileRow3.getValue(column.getName())); } } } cursor.close(); } } } /** * Test delete * * @param geoPackage GeoPackage * @throws SQLException upon error */ public static void testDelete(GeoPackage geoPackage) throws SQLException { TileMatrixSetDao tileMatrixSetDao = geoPackage.getTileMatrixSetDao(); if (tileMatrixSetDao.isTableExists()) { List<TileMatrixSet> results = tileMatrixSetDao.queryForAll(); results = TestUtils.getRandomList(results, 3); for (TileMatrixSet tileMatrixSet : results) { TileDao dao = geoPackage.getTileDao(tileMatrixSet); TestCase.assertNotNull(dao); TileCursor cursor = dao.queryForAll(); int count = cursor.getCount(); if (count > 0) { // Choose random tile int random = (int) (Math.random() * count); cursor.moveToPosition(random); TileRow tileRow = cursor.getRow(); cursor.close(); // Delete row try { TestCase.assertEquals(1, dao.delete(tileRow)); } catch (SQLiteException e) { if (TestUtils.isFutureSQLiteException(e)) { continue; } else { throw e; } } // Verify deleted TileRow queryTileRow = dao.queryForIdRow(tileRow.getId()); TestCase.assertNull(queryTileRow); cursor = dao.queryForAll(); TestCase.assertEquals(count - 1, cursor.getCount()); cursor.close(); } cursor.close(); } } } /** * Test getZoomLevel * * @param geoPackage GeoPackage * @throws SQLException upon error */ public static void testGetZoomLevel(GeoPackage geoPackage) throws 
SQLException { TileMatrixSetDao tileMatrixSetDao = geoPackage.getTileMatrixSetDao(); if (tileMatrixSetDao.isTableExists()) { List<TileMatrixSet> results = tileMatrixSetDao.queryForAll(); results = TestUtils.getRandomList(results, 3); for (TileMatrixSet tileMatrixSet : results) { TileDao dao = geoPackage.getTileDao(tileMatrixSet); List<TileMatrix> tileMatrices = dao.getTileMatrices(); for (TileMatrix tileMatrix : tileMatrices) { double width = tileMatrix.getPixelXSize() * tileMatrix.getTileWidth(); double height = tileMatrix.getPixelYSize() * tileMatrix.getTileHeight(); long zoomLevel = dao.getZoomLevel(width); TestCase.assertEquals(tileMatrix.getZoomLevel(), zoomLevel); zoomLevel = dao.getZoomLevel(width, height); TestCase.assertEquals(tileMatrix.getZoomLevel(), zoomLevel); zoomLevel = dao.getZoomLevel(width + 1); TestCase.assertEquals(tileMatrix.getZoomLevel(), zoomLevel); zoomLevel = dao.getZoomLevel(width + 1, height + 1); TestCase.assertEquals(tileMatrix.getZoomLevel(), zoomLevel); zoomLevel = dao.getZoomLevel(width - 1); TestCase.assertEquals(tileMatrix.getZoomLevel(), zoomLevel); zoomLevel = dao.getZoomLevel(width - 1, height - 1); TestCase.assertEquals(tileMatrix.getZoomLevel(), zoomLevel); } } } } /** * Test queryByRange * * @param geoPackage GeoPackage * @throws SQLException upon error */ public static void testQueryByRange(GeoPackage geoPackage) throws SQLException { TileMatrixSetDao tileMatrixSetDao = geoPackage.getTileMatrixSetDao(); if (tileMatrixSetDao.isTableExists()) { List<TileMatrixSet> results = tileMatrixSetDao.queryForAll(); results = TestUtils.getRandomList(results, 3); for (TileMatrixSet tileMatrixSet : results) { TileDao dao = geoPackage.getTileDao(tileMatrixSet); List<TileMatrix> tileMatrices = dao.getTileMatrices(); long mapMinZoom = Long.MAX_VALUE; long mapMaxZoom = Long.MIN_VALUE; long[] mapZoomRange = dao.getMapZoomRange(); for (TileMatrix tileMatrix : tileMatrices) { double width = tileMatrix.getPixelXSize() * 
tileMatrix.getTileWidth(); double height = tileMatrix.getPixelYSize() * tileMatrix.getTileHeight(); long zoomLevel = dao.getZoomLevel(width, height); long mapZoom = dao.getMapZoom(tileMatrix.getZoomLevel()); mapMinZoom = Math.min(mapMinZoom, mapZoom); mapMaxZoom = Math.max(mapMaxZoom, mapZoom); BoundingBox setProjectionBoundingBox = tileMatrixSet .getBoundingBox(); BoundingBox setWebMercatorBoundingBox = setProjectionBoundingBox .transform(tileMatrixSet.getProjection() .getTransformation( ProjectionConstants.EPSG_WEB_MERCATOR)); BoundingBox boundingBox = new BoundingBox(-180.0, -90.0, 180.0, 90.0); BoundingBox webMercatorBoundingBox = boundingBox .transform(ProjectionFactory .getProjection( ProjectionConstants.EPSG_WORLD_GEODETIC_SYSTEM) .getTransformation( ProjectionConstants.EPSG_WEB_MERCATOR)); TileGrid tileGrid = TileBoundingBoxUtils.getTileGrid( setWebMercatorBoundingBox, tileMatrix.getMatrixWidth(), tileMatrix.getMatrixHeight(), webMercatorBoundingBox); TileCursor cursor = dao .queryByTileGrid(tileGrid, zoomLevel); int cursorCount = cursor != null ? cursor.getCount() : 0; TileCursor expectedCursor = dao.queryForTile(zoomLevel); TestCase.assertEquals(expectedCursor.getCount(), cursorCount); if (cursor != null) { cursor.close(); } expectedCursor.close(); double maxLon = (360.0 * Math.random()) - 180.0; double minLon = ((maxLon + 180.0) * Math.random()) - 180.0; double maxLat = (180.0 * Math.random()) - 90.0; double minLat = ((maxLon + 90.0) * Math.random()) - 90.0; boundingBox = new BoundingBox(minLon, minLat, maxLon, maxLat); webMercatorBoundingBox = boundingBox .transform(ProjectionFactory .getProjection( ProjectionConstants.EPSG_WORLD_GEODETIC_SYSTEM) .getTransformation( ProjectionConstants.EPSG_WEB_MERCATOR)); tileGrid = TileBoundingBoxUtils.getTileGrid( setWebMercatorBoundingBox, tileMatrix.getMatrixWidth(), tileMatrix.getMatrixHeight(), webMercatorBoundingBox); cursor = dao.queryByTileGrid(tileGrid, zoomLevel); cursorCount = cursor != null ? 
cursor.getCount() : 0; if (tileGrid != null) { cursorCount = Math.min(100, cursorCount); int count = 0; for (long column = tileGrid.getMinX(); column <= tileGrid .getMaxX(); column++) { for (long row = tileGrid.getMinY(); row <= tileGrid .getMaxY(); row++) { TileRow tileRow = dao.queryForTile(column, row, zoomLevel); if (tileRow != null) { count++; if(count >= cursorCount){ break; } } } if(count >= cursorCount){ break; } } TestCase.assertEquals(count, cursorCount); } else { TestCase.assertEquals(0, cursorCount); } if (cursor != null) { cursor.close(); } } TestCase.assertEquals(mapZoomRange[0], mapMinZoom); TestCase.assertEquals(mapZoomRange[1], mapMaxZoom); } } } /** * Test querying for the bounding box at a tile matrix zoom level * * @param geoPackage GeoPackage * @throws SQLException upon error */ public static void testTileMatrixBoundingBox(GeoPackage geoPackage) throws SQLException { TileMatrixSetDao tileMatrixSetDao = geoPackage.getTileMatrixSetDao(); if (tileMatrixSetDao.isTableExists()) { List<TileMatrixSet> results = tileMatrixSetDao.queryForAll(); results = TestUtils.getRandomList(results, 3); for (TileMatrixSet tileMatrixSet : results) { TileDao dao = geoPackage.getTileDao(tileMatrixSet); TestCase.assertNotNull(dao); BoundingBox totalBoundingBox = tileMatrixSet.getBoundingBox(); TestCase.assertEquals(totalBoundingBox, dao.getBoundingBox()); List<TileMatrix> tileMatrices = dao.getTileMatrices(); for (TileMatrix tileMatrix : tileMatrices) { double xDistance = tileMatrixSet.getMaxX() - tileMatrixSet.getMinX(); double xDistance2 = tileMatrix.getMatrixWidth() * tileMatrix.getTileWidth() * tileMatrix.getPixelXSize(); TestCase.assertEquals(xDistance, xDistance2, .0000000001); double yDistance = tileMatrixSet.getMaxY() - tileMatrixSet.getMinY(); double yDistance2 = tileMatrix.getMatrixHeight() * tileMatrix.getTileHeight() * tileMatrix.getPixelYSize(); TestCase.assertEquals(yDistance, yDistance2, .0000000001); long zoomLevel = tileMatrix.getZoomLevel(); int count 
= dao.count(zoomLevel); TileGrid totalTileGrid = dao.getTileGrid(zoomLevel); TileGrid tileGrid = dao.queryForTileGrid(zoomLevel); BoundingBox boundingBox = dao.getBoundingBox(zoomLevel); if (totalTileGrid.equals(tileGrid)) { TestCase.assertEquals(totalBoundingBox, boundingBox); } else { TestCase.assertTrue(totalBoundingBox.getMinLongitude() <= boundingBox .getMinLongitude()); TestCase.assertTrue(totalBoundingBox.getMaxLongitude() >= boundingBox .getMaxLongitude()); TestCase.assertTrue(totalBoundingBox.getMinLatitude() <= boundingBox .getMinLatitude()); TestCase.assertTrue(totalBoundingBox.getMaxLatitude() >= boundingBox .getMaxLatitude()); } boolean minYDeleted = false; boolean maxYDeleted = false; boolean minXDeleted = false; boolean maxXDeleted = false; int deleted = 0; if (tileMatrix.getMatrixHeight() > 1 || tileMatrix.getMatrixWidth() > 1) { for (int column = 0; column < tileMatrix.getMatrixWidth(); column++) { int expectedDelete = dao.queryForTile(column, 0, zoomLevel) != null ? 1 : 0; TestCase.assertEquals(expectedDelete, dao.deleteTile(column, 0, zoomLevel)); if (expectedDelete > 0) { minYDeleted = true; } deleted += expectedDelete; expectedDelete = dao .queryForTile(column, tileMatrix.getMatrixHeight() - 1, zoomLevel) != null ? 1 : 0; TestCase.assertEquals(expectedDelete, dao .deleteTile(column, tileMatrix.getMatrixHeight() - 1, zoomLevel)); if (expectedDelete > 0) { maxYDeleted = true; } deleted += expectedDelete; } for (int row = 1; row < tileMatrix.getMatrixHeight() - 1; row++) { int expectedDelete = dao.queryForTile(0, row, zoomLevel) != null ? 1 : 0; TestCase.assertEquals(expectedDelete, dao.deleteTile(0, row, zoomLevel)); if (expectedDelete > 0) { minXDeleted = true; } deleted += expectedDelete; expectedDelete = dao.queryForTile( tileMatrix.getMatrixWidth() - 1, row, zoomLevel) != null ? 
1 : 0; TestCase.assertEquals(expectedDelete, dao .deleteTile( tileMatrix.getMatrixWidth() - 1, row, zoomLevel)); if (expectedDelete > 0) { maxXDeleted = true; } deleted += expectedDelete; } } else { TestCase.assertEquals(1, dao.deleteTile(0, 0, zoomLevel)); deleted++; } int updatedCount = dao.count(zoomLevel); TestCase.assertEquals(count - deleted, updatedCount); TileGrid updatedTileGrid = dao.queryForTileGrid(zoomLevel); BoundingBox updatedBoundingBox = dao.getBoundingBox(zoomLevel); if (updatedCount == 0 || (tileMatrix.getMatrixHeight() <= 2 && tileMatrix .getMatrixWidth() <= 2)) { TestCase.assertNull(updatedTileGrid); TestCase.assertNull(updatedBoundingBox); } else { TestCase.assertNotNull(updatedTileGrid); TestCase.assertNotNull(updatedBoundingBox); if (minXDeleted || minYDeleted || maxXDeleted || maxYDeleted) { TestCase.assertTrue(updatedTileGrid.getMinX() >= tileGrid .getMinX()); TestCase.assertTrue(updatedTileGrid.getMinY() >= tileGrid .getMinY()); TestCase.assertTrue(updatedTileGrid.getMaxX() <= tileGrid .getMaxX()); TestCase.assertTrue(updatedTileGrid.getMaxY() <= tileGrid .getMaxY()); } else { TestCase.assertEquals(tileGrid.getMinX(), updatedTileGrid.getMinX()); TestCase.assertEquals(tileGrid.getMinY(), updatedTileGrid.getMinY()); TestCase.assertEquals(tileGrid.getMaxX(), updatedTileGrid.getMaxX()); TestCase.assertEquals(tileGrid.getMaxY(), updatedTileGrid.getMaxY()); } BoundingBox tileGridBoundingBox = TileBoundingBoxUtils.getBoundingBox(totalBoundingBox, tileMatrix, updatedTileGrid); TestCase.assertEquals(tileGridBoundingBox, updatedBoundingBox); } } } } } static boolean threadedTileDaoError = false; /** * Test threaded tile dao * * @param geoPackage GeoPackage * @throws SQLException upon error */ public static void testThreadedTileDao(final GeoPackage geoPackage) throws SQLException { final int threads = 30; final int attemptsPerThread = 50; TileMatrixSetDao tileMatrixSetDao = geoPackage.getTileMatrixSetDao(); if (tileMatrixSetDao.isTableExists()) { 
List<TileMatrixSet> results = tileMatrixSetDao.queryForAll(); results = TestUtils.getRandomList(results, 3); for (TileMatrixSet tileMatrixSet : results) { threadedTileDaoError = false; final String tableName = tileMatrixSet.getTableName(); Runnable task = new Runnable() { @Override public void run() { for (int i = 0; i < attemptsPerThread; i++) { try { ContentsDao contentsDao = geoPackage .getContentsDao(); Contents contents = contentsDao .queryForId(tableName); if (contents == null) { throw new Exception( "Contents was null, table name: " + tableName); } TileDao dao = geoPackage.getTileDao(tableName); if (dao == null) { throw new Exception( "Tile DAO was null, table name: " + tableName); } } catch (Exception e) { threadedTileDaoError = true; e.printStackTrace(); break; } } } }; ExecutorService executor = Executors .newFixedThreadPool(threads); for (int i = 0; i < threads; i++) { executor.submit(task); } executor.shutdown(); try { executor.awaitTermination(60, TimeUnit.SECONDS); } catch (InterruptedException e) { e.printStackTrace(); TestCase.fail("Waiting for threads terminated: " + e.getMessage()); } if (threadedTileDaoError) { TestCase.fail("Error occurred during threading"); } } } } }
/* * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://duracloud.org/license/ */ package org.duracloud.duradmin.spaces.controller; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.validation.Valid; import org.apache.http.HttpStatus; import org.duracloud.client.ContentStore; import org.duracloud.client.ContentStoreManager; import org.duracloud.client.StoreCaller; import org.duracloud.common.model.AclType; import org.duracloud.common.util.ExtendedIteratorCounterThread; import org.duracloud.duradmin.domain.Space; import org.duracloud.duradmin.domain.SpaceProperties; import org.duracloud.duradmin.util.SpaceUtil; import org.duracloud.error.ContentStoreException; import org.duracloud.mill.db.repo.JpaBitIntegrityReportRepo; import org.duracloud.reportdata.bitintegrity.BitIntegrityReportProperties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.stereotype.Controller; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.servlet.ModelAndView; /** * @author Daniel Bernstein */ @Controller @RequestMapping("/spaces/space") public class SpaceController { protected final Logger log = LoggerFactory.getLogger(getClass()); private ContentStoreManager contentStoreManager; private JpaBitIntegrityReportRepo reportRepo; private String adminSpaceId; 
@Autowired public SpaceController( @Qualifier("adminSpaceId") String adminSpaceId, @Qualifier("bitIntegrityReportRepo") JpaBitIntegrityReportRepo reportRepo, @Qualifier("contentStoreManager") ContentStoreManager contentStoreManager) { this.adminSpaceId = adminSpaceId; this.reportRepo = reportRepo; this.contentStoreManager = contentStoreManager; } @RequestMapping(value = "", method = RequestMethod.GET) public ModelAndView get(HttpServletRequest request, HttpServletResponse response, @Valid Space space, BindingResult result) throws Exception { try { String prefix = request.getParameter("prefix"); if (prefix != null) { prefix = ("".equals(prefix.trim()) ? null : prefix); } String marker = request.getParameter("marker"); ContentStore contentStore = contentStoreManager.getContentStore(space.getStoreId()); org.duracloud.domain.Space cloudSpace = contentStore.getSpace(space.getSpaceId(), prefix, 200, marker); ContentStore contentStoreWithoutRetries = contentStoreManager.getContentStore(space.getStoreId(), 0); populateSpace(space, cloudSpace, contentStoreWithoutRetries); populateSpaceCount(space, request); if (space.isMillDbEnabled()) { populateBitIntegrityResults(space, contentStore); } return createModel(space); } catch (ContentStoreException ex) { ex.printStackTrace(); response.setStatus(HttpStatus.SC_NOT_FOUND); return createModel(null); } } private void populateBitIntegrityResults(Space space, ContentStore contentStore) { try { BitIntegrityReportProperties bitReportProps = contentStore.getBitIntegrityReportProperties(space.getSpaceId()); if (bitReportProps == null) { log.warn("No bit report properties found for space {}", space.getSpaceId()); } else { space.setBitIntegrityReportProperties(bitReportProps); } } catch (Exception ex) { log.error("failed to populate bit integrity results due to error:" + ex.getMessage(), ex); } } private void populateSpace(Space space, org.duracloud.domain.Space cloudSpace, ContentStore contentStore) throws ContentStoreException { 
SpaceUtil.populateSpace(space, cloudSpace, contentStore, getAuthentication()); String primaryStoreId = contentStoreManager.getPrimaryContentStore().getStoreId(); boolean primary = primaryStoreId.equals(space.getStoreId()); space.setPrimaryStorageProvider(primary); } private void populateSpaceCount(Space space, HttpServletRequest request) throws Exception { //flush space count cache if (request.getParameterMap().containsKey("recount")) { expireItemCount(request, space); } String countStr = space.getProperties().getCount(); if (countStr.endsWith("+")) { setItemCount(space, request); } else { space.setItemCount(Long.valueOf(space.getProperties().getCount())); } } private void setItemCount(final Space space, HttpServletRequest request) throws ContentStoreException { String key = formatItemCountCacheKey(space); final ServletContext appContext = request.getSession().getServletContext(); ItemCounter listener = (ItemCounter) appContext.getAttribute(key); space.setItemCount(new Long(-1)); if (listener != null) { if (listener.isCountComplete()) { space.setItemCount(listener.getCount()); } else { SpaceProperties properties = space.getProperties(); Long interCount = listener.getIntermediaryCount(); if (interCount == null) { interCount = 0l; } properties.setCount(String.valueOf(interCount) + "+"); space.setProperties(properties); } } else { final ItemCounter itemCounterListener = new ItemCounter(); appContext.setAttribute(key, itemCounterListener); final ContentStore contentStore = contentStoreManager.getContentStore(space.getStoreId()); final StoreCaller<Iterator<String>> caller = new StoreCaller<Iterator<String>>() { protected Iterator<String> doCall() throws ContentStoreException { return contentStore.getSpaceContents(space.getSpaceId()); } public String getLogMessage() { return "Error calling contentStore.getSpaceContents() for: " + space.getSpaceId(); } }; new Thread(new Runnable() { public void run() { ExtendedIteratorCounterThread runnable = new 
ExtendedIteratorCounterThread(caller.call(), itemCounterListener); runnable.run(); } }).start(); } } private String formatItemCountCacheKey(Space space) { return space.getStoreId() + "/" + space.getSpaceId() + "/itemCountListener"; } private void expireItemCount(HttpServletRequest request, Space space) { String key = formatItemCountCacheKey(space); request.getSession().getServletContext().removeAttribute(key); } private Authentication getAuthentication() { return (Authentication) SecurityContextHolder.getContext().getAuthentication(); } @RequestMapping(value = "", method = RequestMethod.POST) public ModelAndView addSpace(HttpServletRequest request, HttpServletResponse response, @Valid Space space, BindingResult result) throws Exception { String spaceId = space.getSpaceId(); ContentStore contentStore = getContentStore(space); contentStore.createSpace(spaceId); if ("true".equals(request.getParameter("publicFlag"))) { Map<String, AclType> acls = new HashMap<String, AclType>(); acls.put("group-public", AclType.READ); contentStore.setSpaceACLs(spaceId, acls); } populateSpace(space, contentStore.getSpace(spaceId, null, 0, null), contentStore); return createModel(space); } @RequestMapping(value = "/delete", method = RequestMethod.POST) public ModelAndView delete(HttpServletRequest request, HttpServletResponse response, Space space, BindingResult result) throws Exception { String spaceId = space.getSpaceId(); ContentStore contentStore = getContentStore(space); contentStore.deleteSpace(spaceId); return createModel(space); } private ModelAndView createModel(Space space) { return new ModelAndView("jsonView", "space", space); } protected ContentStore getContentStore(Space space) throws ContentStoreException { return contentStoreManager.getContentStore(space.getStoreId()); } }
package com.bison; import android.content.Context; import android.graphics.Canvas; import android.graphics.ColorFilter; import android.graphics.Paint; import android.graphics.PixelFormat; import android.graphics.Point; import android.graphics.Rect; import android.graphics.drawable.Drawable; public class ChromeDrawable extends Drawable implements Drawable.Callback { // constants private static final int MAX_LEVEL = 10000; private static final int CENT_LEVEL = MAX_LEVEL / 2; private static final int MID_LEVEL = CENT_LEVEL / 2; private static final int ALPHA_OPAQUE = 255; private static final int ACCELERATION_LEVEL = 2; // default private int mAlpha = ALPHA_OPAQUE; private ColorFilter mColorFilter; // points and paints private Point[] mArrowPoints; private Paint mPaint1; private Paint mPaint2; private Paint mPaint3; private Paint mPaint4; private double unit; private int width, x_beg, y_beg, x_end, y_end, offset; // speed related private int acceleration = ACCELERATION_LEVEL; private double distance = 0.5 * ACCELERATION_LEVEL * MID_LEVEL * MID_LEVEL; private double max_speed; // set in setAcceleration(...); private double offsetPercentage; // top color var private int colorSign; private ProgressStates currentProgressStates = ProgressStates.GREEN_TOP; private enum ProgressStates { GREEN_TOP, YELLOW_TOP, RED_TOP, BLUE_TOP } public ChromeDrawable(int[] colors) { initCirclesProgress(colors); } private void initCirclesProgress(int[] colors) { //init Paint colors initColors(colors); // init alpha and color filter setAlpha(mAlpha); setColorFilter(mColorFilter); // offset percentage setAcceleration(ACCELERATION_LEVEL); offsetPercentage = 0; // init colorSign colorSign = 1; // |= 1, |= 2, |= 4, |= 8 --> 0xF } private void initColors(int[] colors) { // red circle, left up mPaint1 = new Paint(Paint.ANTI_ALIAS_FLAG); mPaint1.setColor(colors[0]); mPaint1.setAntiAlias(true); // blue circle, right down mPaint2 = new Paint(Paint.ANTI_ALIAS_FLAG); mPaint2.setColor(colors[1]); 
mPaint2.setAntiAlias(true); // yellow circle, left down mPaint3 = new Paint(Paint.ANTI_ALIAS_FLAG); mPaint3.setColor(colors[2]); mPaint3.setAntiAlias(true); // green circle, right up mPaint4 = new Paint(Paint.ANTI_ALIAS_FLAG); mPaint4.setColor(colors[3]); mPaint4.setAntiAlias(true); } @Override protected void onBoundsChange(Rect bounds) { super.onBoundsChange(bounds); measureCircleProgress(bounds.width(), bounds.height()); } @Override protected boolean onLevelChange(int level) { // calc one offset data is enough // 0.5 * a * t^2 / mCenterPoint.x = level / sideLevel // t from 0 to 10,000, so divided into 4 parts. // the ACCELERATION_LEVEL defines how many divisions in 10000 levels level %= MAX_LEVEL / acceleration; final int temp_level = level % (MID_LEVEL / acceleration); final int ef_width = (int)(unit * 3.0); // effective width if(level < CENT_LEVEL / acceleration) { // go if(level < MID_LEVEL / acceleration) { // set colorSign if(colorSign == 0xF) { changeTopColor(); colorSign = 1; } // from beg to mid offsetPercentage = 0.5 * acceleration * temp_level * temp_level / distance; offset = (int)(offsetPercentage * ef_width / 2); // x and y direction offset } else { // set colorSign colorSign |= 2; // from mid to end offsetPercentage = (max_speed * temp_level - 0.5 * acceleration * temp_level * temp_level) / distance + 1.0; offset = (int)(offsetPercentage * ef_width / 2); // x and y direction offset } } else { // back if(level < (CENT_LEVEL + MID_LEVEL) / acceleration) { // set colorSign if(colorSign == 0x3) { changeTopColor(); colorSign |= 4; } // from end to mid offsetPercentage = 0.5 * acceleration * temp_level * temp_level / distance; offset = (int)(ef_width - offsetPercentage * ef_width / 2); // x and y direction offset } else { // set colorSign colorSign |= 8; // from mid to beg offsetPercentage = (max_speed * temp_level - 0.5 * acceleration * temp_level * temp_level) / distance + 1.0; offsetPercentage = offsetPercentage == 1.0 ? 
2.0 : offsetPercentage; offset = (int)(ef_width - offsetPercentage * ef_width / 2); // x and y direction offset } } mArrowPoints[0].set((int)unit+x_beg+offset, (int)unit+y_beg+offset); // mPaint1, left up mArrowPoints[1].set((int)(unit*4.0)+x_beg-offset, (int)(unit*4.0)+y_beg-offset); // mPaint2, right down mArrowPoints[2].set((int)unit+x_beg+offset, (int)(unit*4.0)+y_beg-offset); // mPaint3, left down mArrowPoints[3].set((int)(unit*4.0)+x_beg-offset, (int)unit+y_beg+offset); // mPaint4, right up return true; } private void changeTopColor() { switch(currentProgressStates){ case GREEN_TOP: currentProgressStates = ProgressStates.YELLOW_TOP; break; case YELLOW_TOP: currentProgressStates = ProgressStates.RED_TOP; break; case RED_TOP: currentProgressStates = ProgressStates.BLUE_TOP; break; case BLUE_TOP: currentProgressStates = ProgressStates.GREEN_TOP; break; } } @Override public void draw(Canvas canvas) { // draw circles if(currentProgressStates != ProgressStates.RED_TOP) canvas.drawCircle(mArrowPoints[0].x, mArrowPoints[0].y, (float)unit, mPaint1); if(currentProgressStates != ProgressStates.BLUE_TOP) canvas.drawCircle(mArrowPoints[1].x, mArrowPoints[1].y, (float)unit, mPaint2); if(currentProgressStates != ProgressStates.YELLOW_TOP) canvas.drawCircle(mArrowPoints[2].x, mArrowPoints[2].y, (float)unit, mPaint3); if(currentProgressStates != ProgressStates.GREEN_TOP) canvas.drawCircle(mArrowPoints[3].x, mArrowPoints[3].y, (float)unit, mPaint4); // draw the top one switch(currentProgressStates){ case GREEN_TOP: canvas.drawCircle(mArrowPoints[3].x, mArrowPoints[3].y, (float)unit, mPaint4); break; case YELLOW_TOP: canvas.drawCircle(mArrowPoints[2].x, mArrowPoints[2].y, (float)unit, mPaint3); break; case RED_TOP: canvas.drawCircle(mArrowPoints[0].x, mArrowPoints[0].y, (float)unit, mPaint1); break; case BLUE_TOP: canvas.drawCircle(mArrowPoints[1].x, mArrowPoints[1].y, (float)unit, mPaint2); break; } } private void measureCircleProgress(int width, int height) { // get min edge 
as width if(width > height) { // use height this.width = height - 1; // minus 1 to avoid "3/2=1" x_beg = (width - height) / 2 + 1; y_beg = 1; x_end = x_beg + this.width; y_end = this.width; } else { //use width this.width = width - 1; x_beg = 1; y_beg = (height - width) / 2 + 1; x_end = this.width; y_end = y_beg + this.width; } unit = (double)this.width / 5.0; // init the original position, and then set position by offsets mArrowPoints = new Point[4]; mArrowPoints[0] = new Point((int)unit+x_beg, (int)unit+y_beg); // mPaint1, left up mArrowPoints[1] = new Point((int)(unit*4.0)+x_beg, (int)(unit*4.0)+y_beg); // mPaint2, right down mArrowPoints[2] = new Point((int)unit+x_beg, (int)(unit*4.0)+y_beg); // mPaint3, left down mArrowPoints[3] = new Point((int)(unit*4.0)+x_beg, (int)unit+y_beg); // mPaint4, right up } public void setAcceleration(int acceleration) { this.acceleration = acceleration; distance = 0.5 * acceleration * (MID_LEVEL / acceleration) * (MID_LEVEL / acceleration); max_speed = acceleration * (MID_LEVEL / acceleration); } @Override public void setAlpha(int alpha) { mPaint1.setAlpha(alpha); mPaint2.setAlpha(alpha); mPaint3.setAlpha(alpha); mPaint4.setAlpha(alpha); } @Override public void setColorFilter(ColorFilter cf) { mColorFilter = cf; mPaint1.setColorFilter(cf); mPaint2.setColorFilter(cf); mPaint3.setColorFilter(cf); mPaint4.setColorFilter(cf); } @Override public int getOpacity() { return PixelFormat.TRANSLUCENT; } @Override public void invalidateDrawable(Drawable who) { final Callback callback = getCallback(); if (callback != null) { callback.invalidateDrawable(this); } } @Override public Callback getCallback() { return super.getCallback(); } @Override public void scheduleDrawable(Drawable who, Runnable what, long when) { final Callback callback = getCallback(); if (callback != null) { callback.scheduleDrawable(this, what, when); } } @Override public void unscheduleDrawable(Drawable who, Runnable what) { final Callback callback = getCallback(); if 
(callback != null) { callback.unscheduleDrawable(this, what); } } public static class Builder { private int[] mColors; public Builder(Context context){ initDefaults(context); } private void initDefaults(Context context) { //Default values mColors = new int[]{0xFFC93437,0xFF375BF1,0xFFF7D23E,0xFF34A350}; return; } public Builder colors(int[] colors) { if (colors == null || colors.length == 0) { throw new IllegalArgumentException("Your color array must contains at least 4 values"); } mColors = colors; return this; } public ChromeDrawable build() { return new ChromeDrawable(mColors); } } }
/*
 ******************************************************************************
 * Copyright (C) 2003-2011, International Business Machines Corporation and  *
 * others. All Rights Reserved.                                              *
 ******************************************************************************
 */
package com.ibm.icu.impl;

import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;

import com.ibm.icu.impl.locale.AsciiUtil;

/**
 * Utility class to parse and normalize locale ids (including POSIX style).
 *
 * A locale id is parsed as language [_script] [_country] [_variant]
 * [@keywords], where each piece is normalized (case-folded, 3-letter codes
 * mapped to 2-letter codes) into an internal buffer. Parsing is single-pass
 * over the {@link #id} char array, driven by {@link #index}.
 */
public final class LocaleIDParser {

    /**
     * Char array representing the locale ID.
     */
    private char[] id;

    /**
     * Current position in {@link #id} (while parsing).
     */
    private int index;

    /**
     * Temporary buffer for parsed sections of data.
     */
    private StringBuilder buffer;

    // um, don't handle POSIX ids unless we request it. why not? well... because.
    private boolean canonicalize;
    private boolean hadCountry;

    // used when canonicalizing
    Map<String, String> keywords;
    String baseName;

    /**
     * Parsing constants.
     */
    private static final char KEYWORD_SEPARATOR = '@';
    private static final char HYPHEN = '-';
    private static final char KEYWORD_ASSIGN = '=';
    private static final char COMMA = ',';
    private static final char ITEM_SEPARATOR = ';';
    private static final char DOT = '.';
    private static final char UNDERSCORE = '_';

    public LocaleIDParser(String localeID) {
        this(localeID, false);
    }

    public LocaleIDParser(String localeID, boolean canonicalize) {
        id = localeID.toCharArray();
        index = 0;
        buffer = new StringBuilder(id.length + 5);
        this.canonicalize = canonicalize;
    }

    // Rewind to the start of the id and clear the buffer so a fresh parse can run.
    private void reset() {
        index = 0;
        buffer = new StringBuilder(id.length + 5);
    }

    // utilities for working on text in the buffer

    /**
     * Append c to the buffer.
     */
    private void append(char c) {
        buffer.append(c);
    }

    private void addSeparator() {
        append(UNDERSCORE);
    }

    /**
     * Returns the text in the buffer from start to blen as a String.
     */
    private String getString(int start) {
        return buffer.substring(start);
    }

    /**
     * Set the length of the buffer to pos, then append the string.
     */
    private void set(int pos, String s) {
        buffer.delete(pos, buffer.length());
        buffer.insert(pos, s);
    }

    /**
     * Append the string to the buffer.
     */
    private void append(String s) {
        buffer.append(s);
    }

    // utilities for parsing text out of the id

    /**
     * Character to indicate no more text is available in the id.
     */
    private static final char DONE = '\uffff';

    /**
     * Returns the character at index in the id, and advance index. The returned character
     * is DONE if index was at the limit of the buffer. The index is advanced regardless
     * so that decrementing the index will always 'unget' the last character returned.
     */
    private char next() {
        if (index == id.length) {
            index++;
            return DONE;
        }
        return id[index++];
    }

    /**
     * Advance index until the next terminator or id separator, and leave it there.
     */
    private void skipUntilTerminatorOrIDSeparator() {
        while (!isTerminatorOrIDSeparator(next()));
        --index;
    }

    /**
     * Returns true if the character at index in the id is a terminator.
     */
    private boolean atTerminator() {
        return index >= id.length || isTerminator(id[index]);
    }

    /**
     * Returns true if the character is a terminator (keyword separator, dot, or DONE).
     * Dot is a terminator because of the POSIX form, where dot precedes the codepage.
     */
    private boolean isTerminator(char c) {
        // always terminate at DOT, even if not handling POSIX. It's an error...
        return c == KEYWORD_SEPARATOR || c == DONE || c == DOT;
    }

    /**
     * Returns true if the character is a terminator or id separator.
     */
    private boolean isTerminatorOrIDSeparator(char c) {
        return c == UNDERSCORE || c == HYPHEN || isTerminator(c);
    }

    /**
     * Returns true if the start of the buffer has an experimental or private language
     * prefix, the pattern '[ixIX][-_].' shows the syntax checked.
     */
    private boolean haveExperimentalLanguagePrefix() {
        if (id.length > 2) {
            char c = id[1];
            if (c == HYPHEN || c == UNDERSCORE) {
                c = id[0];
                return c == 'x' || c == 'X' || c == 'i' || c == 'I';
            }
        }
        return false;
    }

    /**
     * Returns true if a value separator occurs at or after index.
     */
    private boolean haveKeywordAssign() {
        // assume it is safe to start from index
        for (int i = index; i < id.length; ++i) {
            if (id[i] == KEYWORD_ASSIGN) {
                return true;
            }
        }
        return false;
    }

    /**
     * Advance index past language, and accumulate normalized language code in buffer.
     * Index must be at 0 when this is called. Index is left at a terminator or id
     * separator. Returns the start of the language code in the buffer.
     */
    private int parseLanguage() {
        int startLength = buffer.length();
        if (haveExperimentalLanguagePrefix()) {
            append(AsciiUtil.toLower(id[0]));
            append(HYPHEN);
            index = 2;
        }
        char c;
        while (!isTerminatorOrIDSeparator(c = next())) {
            append(AsciiUtil.toLower(c));
        }
        --index; // unget

        // normalize 3-letter ISO codes to their 2-letter equivalents
        if (buffer.length() - startLength == 3) {
            String lang = LocaleIDs.threeToTwoLetterLanguage(getString(0));
            if (lang != null) {
                set(0, lang);
            }
        }

        return 0;
    }

    /**
     * Advance index past language. Index must be at 0 when this is called. Index
     * is left at a terminator or id separator.
     */
    private void skipLanguage() {
        if (haveExperimentalLanguagePrefix()) {
            index = 2;
        }
        skipUntilTerminatorOrIDSeparator();
    }

    /**
     * Advance index past script, and accumulate normalized script in buffer.
     * Index must be immediately after the language.
     * If the item at this position is not a script (is not four characters
     * long) leave index and buffer unchanged. Otherwise index is left at
     * a terminator or id separator. Returns the start of the script code
     * in the buffer (this may be equal to the buffer length, if there is no
     * script).
     */
    private int parseScript() {
        if (!atTerminator()) {
            int oldIndex = index; // save original index
            ++index;

            int oldBlen = buffer.length(); // get before append hyphen, if we truncate everything is undone
            char c;
            boolean firstPass = true;
            while (!isTerminatorOrIDSeparator(c = next()) && AsciiUtil.isAlpha(c)) {
                if (firstPass) {
                    addSeparator();
                    append(AsciiUtil.toUpper(c)); // script is title-cased: Xxxx
                    firstPass = false;
                } else {
                    append(AsciiUtil.toLower(c));
                }
            }
            --index; // unget

            /* If it's not exactly 4 characters long, then it's not a script. */
            if (index - oldIndex != 5) { // +1 to account for separator
                index = oldIndex;
                buffer.delete(oldBlen, buffer.length());
            } else {
                oldBlen++; // index past hyphen, for clients who want to extract just the script
            }

            return oldBlen;
        }
        return buffer.length();
    }

    /**
     * Advance index past script.
     * Index must be immediately after the language and IDSeparator.
     * If the item at this position is not a script (is not four characters
     * long) leave index. Otherwise index is left at a terminator or
     * id separator.
     */
    private void skipScript() {
        if (!atTerminator()) {
            int oldIndex = index;
            ++index;

            char c;
            while (!isTerminatorOrIDSeparator(c = next()) && AsciiUtil.isAlpha(c));
            --index;

            if (index - oldIndex != 5) { // +1 to account for separator
                index = oldIndex;
            }
        }
    }

    /**
     * Advance index past country, and accumulate normalized country in buffer.
     * Index must be immediately after the script (if there is one, else language)
     * and IDSeparator. Return the start of the country code in the buffer.
     */
    private int parseCountry() {
        if (!atTerminator()) {
            int oldIndex = index;
            ++index;

            int oldBlen = buffer.length();
            char c;
            boolean firstPass = true;
            while (!isTerminatorOrIDSeparator(c = next())) {
                if (firstPass) { // first, add hyphen
                    hadCountry = true; // we have a country, let variant parsing know
                    addSeparator();
                    ++oldBlen; // increment past hyphen
                    firstPass = false;
                }
                append(AsciiUtil.toUpper(c));
            }
            --index; // unget

            int charsAppended = buffer.length() - oldBlen;

            if (charsAppended == 0) {
                // Do nothing.
            } else if (charsAppended < 2 || charsAppended > 3) {
                // It's not a country, so return index and blen to
                // their previous values.
                index = oldIndex;
                --oldBlen;
                buffer.delete(oldBlen, buffer.length());
                hadCountry = false;
            } else if (charsAppended == 3) {
                // normalize 3-letter region codes to their 2-letter equivalents
                String region = LocaleIDs.threeToTwoLetterRegion(getString(oldBlen));
                if (region != null) {
                    set(oldBlen, region);
                }
            }

            return oldBlen;
        }

        return buffer.length();
    }

    /**
     * Advance index past country.
     * Index must be immediately after the script (if there is one, else language)
     * and IDSeparator.
     */
    private void skipCountry() {
        if (!atTerminator()) {
            if (id[index] == UNDERSCORE || id[index] == HYPHEN) {
                ++index;
            }
            /*
             * Save the index point after the separator, since the format
             * requires two separators if the country is not present.
             */
            int oldIndex = index;

            skipUntilTerminatorOrIDSeparator();
            int charsSkipped = index - oldIndex;
            if (charsSkipped < 2 || charsSkipped > 3) {
                index = oldIndex;
            }
        }
    }

    /**
     * Advance index past variant, and accumulate normalized variant in buffer. This ignores
     * the codepage information from POSIX ids. Index must be immediately after the country
     * or script. Index is left at the keyword separator or at the end of the text. Return
     * the start of the variant code in the buffer.
     *
     * In standard form, we can have the following forms:
     * ll__VVVV
     * ll_CC_VVVV
     * ll_Ssss_VVVV
     * ll_Ssss_CC_VVVV
     *
     * This also handles POSIX ids, which can have the following forms (pppp is code page id):
     * ll_CC.pppp       --> ll_CC
     * ll_CC.pppp@VVVV  --> ll_CC_VVVV
     * ll_CC@VVVV       --> ll_CC_VVVV
     *
     * We identify this use of '@' in POSIX ids by looking for an '=' following
     * the '@'. If there is one, we consider '@' to start a keyword list, instead of
     * being part of a POSIX id.
     *
     * Note: since it was decided that we want an option to not handle POSIX ids, this
     * becomes a bit more complex.
     */
    private int parseVariant() {
        int oldBlen = buffer.length();

        boolean start = true;          // still at the first character after country/script
        boolean needSeparator = true;  // an underscore must precede the next appended run
        boolean skipping = false;      // inside a POSIX codepage section (after DOT)
        char c;
        boolean firstPass = true;

        while ((c = next()) != DONE) {
            if (c == DOT) {
                start = false;
                skipping = true;
            } else if (c == KEYWORD_SEPARATOR) {
                if (haveKeywordAssign()) {
                    // '@' followed by '=' somewhere: keyword list, not POSIX variant
                    break;
                }
                skipping = false;
                start = false;
                needSeparator = true; // add another underscore if we have more text
            } else if (start) {
                start = false;
                if (c != UNDERSCORE && c != HYPHEN) {
                    index--;
                }
            } else if (!skipping) {
                if (needSeparator) {
                    needSeparator = false;
                    if (firstPass && !hadCountry) { // no country, we'll need two
                        addSeparator();
                        ++oldBlen; // for sure
                    }
                    addSeparator();
                    if (firstPass) { // only for the first separator
                        ++oldBlen;
                        firstPass = false;
                    }
                }
                c = AsciiUtil.toUpper(c);
                if (c == HYPHEN || c == COMMA) {
                    c = UNDERSCORE;
                }
                append(c);
            }
        }
        --index; // unget

        return oldBlen;
    }

    // no need for skipvariant, to get the keywords we'll just scan directly for
    // the keyword separator

    /**
     * Returns the normalized language id, or the empty string.
     */
    public String getLanguage() {
        reset();
        return getString(parseLanguage());
    }

    /**
     * Returns the normalized script id, or the empty string.
     */
    public String getScript() {
        reset();
        skipLanguage();
        return getString(parseScript());
    }

    /**
     * return the normalized country id, or the empty string.
     */
    public String getCountry() {
        reset();
        skipLanguage();
        skipScript();
        return getString(parseCountry());
    }

    /**
     * Returns the normalized variant id, or the empty string.
     */
    public String getVariant() {
        reset();
        skipLanguage();
        skipScript();
        skipCountry();
        return getString(parseVariant());
    }

    /**
     * Returns the language, script, country, and variant as separate strings.
     */
    public String[] getLanguageScriptCountryVariant() {
        reset();
        return new String[] {
            getString(parseLanguage()),
            getString(parseScript()),
            getString(parseCountry()),
            getString(parseVariant())
        };
    }

    public void setBaseName(String baseName) {
        this.baseName = baseName;
    }

    // Parses (or copies) the full base name into the buffer.
    public void parseBaseName() {
        if (baseName != null) {
            set(0, baseName);
        } else {
            reset();
            parseLanguage();
            parseScript();
            parseCountry();
            parseVariant();

            // catch unwanted trailing underscore after country if there was no variant
            int len = buffer.length();
            if (len > 0 && buffer.charAt(len - 1) == UNDERSCORE) {
                buffer.deleteCharAt(len - 1);
            }
        }
    }

    /**
     * Returns the normalized base form of the locale id. The base
     * form does not include keywords.
     */
    public String getBaseName() {
        if (baseName != null) {
            return baseName;
        }
        parseBaseName();
        return getString(0);
    }

    /**
     * Returns the normalized full form of the locale id. The full
     * form includes keywords if they are present.
     */
    public String getName() {
        parseBaseName();
        parseKeywords();
        return getString(0);
    }

    // keyword utilities

    /**
     * If we have keywords, advance index to the start of the keywords and return true,
     * otherwise return false.
     */
    private boolean setToKeywordStart() {
        for (int i = index; i < id.length; ++i) {
            if (id[i] == KEYWORD_SEPARATOR) {
                if (canonicalize) {
                    // only treat '@' as keyword start if an '=' follows it
                    for (int j = ++i; j < id.length; ++j) { // increment i past separator for return
                        if (id[j] == KEYWORD_ASSIGN) {
                            index = i;
                            return true;
                        }
                    }
                } else {
                    if (++i < id.length) {
                        index = i;
                        return true;
                    }
                }
                break;
            }
        }
        return false;
    }

    private static boolean isDoneOrKeywordAssign(char c) {
        return c == DONE || c == KEYWORD_ASSIGN;
    }

    private static boolean isDoneOrItemSeparator(char c) {
        return c == DONE || c == ITEM_SEPARATOR;
    }

    // Reads a keyword name: lower-cased and trimmed.
    private String getKeyword() {
        int start = index;
        while (!isDoneOrKeywordAssign(next())) {
        }
        --index;
        return AsciiUtil.toLowerString(new String(id, start, index-start).trim());
    }

    // Reads a keyword value, trimmed but with case preserved.
    private String getValue() {
        int start = index;
        while (!isDoneOrItemSeparator(next())) {
        }
        --index;
        return new String(id, start, index-start).trim(); // leave case alone
    }

    // Comparator used to order keywords in the canonical form (natural String order).
    private Comparator<String> getKeyComparator() {
        final Comparator<String> comp = new Comparator<String>() {
            public int compare(String lhs, String rhs) {
                return lhs.compareTo(rhs);
            }
        };
        return comp;
    }

    /**
     * Returns a map of the keywords and values, or null if there are none.
     */
    public Map<String, String> getKeywordMap() {
        if (keywords == null) {
            TreeMap<String, String> m = null;
            if (setToKeywordStart()) {
                // trim spaces and convert to lower case, both keywords and values.
                do {
                    String key = getKeyword();
                    if (key.length() == 0) {
                        break;
                    }
                    char c = next();
                    if (c != KEYWORD_ASSIGN) {
                        // throw new IllegalArgumentException("key '" + key + "' missing a value.");
                        if (c == DONE) {
                            break;
                        } else {
                            continue;
                        }
                    }
                    String value = getValue();
                    if (value.length() == 0) {
                        // throw new IllegalArgumentException("key '" + key + "' missing a value.");
                        continue;
                    }
                    if (m == null) {
                        m = new TreeMap<String, String>(getKeyComparator());
                    } else if (m.containsKey(key)) {
                        // throw new IllegalArgumentException("key '" + key + "' already has a value.");
                        continue;
                    }
                    m.put(key, value);
                } while (next() == ITEM_SEPARATOR);
            }
            keywords = m != null ? m : Collections.<String, String>emptyMap();
        }

        return keywords;
    }

    /**
     * Parse the keywords and return start of the string in the buffer.
     */
    private int parseKeywords() {
        int oldBlen = buffer.length();
        Map<String, String> m = getKeywordMap();
        if (!m.isEmpty()) {
            boolean first = true;
            for (Map.Entry<String, String> e : m.entrySet()) {
                append(first ? KEYWORD_SEPARATOR : ITEM_SEPARATOR);
                first = false;
                append(e.getKey());
                append(KEYWORD_ASSIGN);
                append(e.getValue());
            }
            if (first == false) {
                // at least one keyword was emitted; skip past the '@'
                ++oldBlen;
            }
        }
        return oldBlen;
    }

    /**
     * Returns an iterator over the keywords, or null if we have an empty map.
     */
    public Iterator<String> getKeywords() {
        Map<String, String> m = getKeywordMap();
        return m.isEmpty() ? null : m.keySet().iterator();
    }

    /**
     * Returns the value for the named keyword, or null if the keyword is not
     * present.
     */
    public String getKeywordValue(String keywordName) {
        Map<String, String> m = getKeywordMap();
        return m.isEmpty() ? null : m.get(AsciiUtil.toLowerString(keywordName.trim()));
    }

    /**
     * Set the keyword value only if it is not already set to something else.
     */
    public void defaultKeywordValue(String keywordName, String value) {
        setKeywordValue(keywordName, value, false);
    }

    /**
     * Set the value for the named keyword, or unset it if value is null. If
     * keywordName itself is null, unset all keywords. If keywordName is not null,
     * value must not be null.
     */
    public void setKeywordValue(String keywordName, String value) {
        setKeywordValue(keywordName, value, true);
    }

    /**
     * Set the value for the named keyword, or unset it if value is null. If
     * keywordName itself is null, unset all keywords. If keywordName is not null,
     * value must not be null. If reset is true, ignore any previous value for
     * the keyword, otherwise do not change the keyword (including removal of
     * one or all keywords).
     */
    private void setKeywordValue(String keywordName, String value, boolean reset) {
        if (keywordName == null) {
            if (reset) {
                // force new map, ignore value
                keywords = Collections.<String, String>emptyMap();
            }
        } else {
            keywordName = AsciiUtil.toLowerString(keywordName.trim());
            if (keywordName.length() == 0) {
                throw new IllegalArgumentException("keyword must not be empty");
            }
            if (value != null) {
                value = value.trim();
                if (value.length() == 0) {
                    throw new IllegalArgumentException("value must not be empty");
                }
            }
            Map<String, String> m = getKeywordMap();
            if (m.isEmpty()) { // it is EMPTY_MAP
                if (value != null) {
                    // force new map
                    keywords = new TreeMap<String, String>(getKeyComparator());
                    keywords.put(keywordName, value.trim());
                }
            } else {
                if (reset || !m.containsKey(keywordName)) {
                    if (value != null) {
                        m.put(keywordName, value);
                    } else {
                        m.remove(keywordName);
                        if (m.isEmpty()) {
                            // force new map
                            keywords = Collections.<String, String>emptyMap();
                        }
                    }
                }
            }
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.apache.hadoop.yarn.server.federation.store.records.impl.pb; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterIdProto; import org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProto; import org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterInfoProtoOrBuilder; import org.apache.hadoop.yarn.federation.proto.YarnServerFederationProtos.SubClusterStateProto; import org.apache.hadoop.yarn.server.federation.store.records.SubClusterId; import org.apache.hadoop.yarn.server.federation.store.records.SubClusterInfo; import org.apache.hadoop.yarn.server.federation.store.records.SubClusterState; import com.google.common.base.Preconditions; import org.apache.hadoop.thirdparty.protobuf.TextFormat; /** * Protocol buffer based implementation of {@link SubClusterInfo}. 
*/ @Private @Unstable public class SubClusterInfoPBImpl extends SubClusterInfo { private SubClusterInfoProto proto = SubClusterInfoProto.getDefaultInstance(); private SubClusterInfoProto.Builder builder = null; private boolean viaProto = false; private SubClusterId subClusterId = null; public SubClusterInfoPBImpl() { builder = SubClusterInfoProto.newBuilder(); } public SubClusterInfoPBImpl(SubClusterInfoProto proto) { this.proto = proto; viaProto = true; } public SubClusterInfoProto getProto() { mergeLocalToProto(); proto = viaProto ? proto : builder.build(); viaProto = true; return proto; } private void mergeLocalToProto() { if (viaProto) { maybeInitBuilder(); } mergeLocalToBuilder(); proto = builder.build(); viaProto = true; } private void maybeInitBuilder() { if (viaProto || builder == null) { builder = SubClusterInfoProto.newBuilder(proto); } viaProto = false; } private void mergeLocalToBuilder() { if (this.subClusterId != null) { builder.setSubClusterId(convertToProtoFormat(this.subClusterId)); } } @Override public String toString() { return TextFormat.shortDebugString(getProto()); } @Override public SubClusterId getSubClusterId() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; if (this.subClusterId != null) { return this.subClusterId; } if (!p.hasSubClusterId()) { return null; } this.subClusterId = convertFromProtoFormat(p.getSubClusterId()); return this.subClusterId; } @Override public void setSubClusterId(SubClusterId subClusterId) { maybeInitBuilder(); if (subClusterId == null) { builder.clearSubClusterId(); } this.subClusterId = subClusterId; } @Override public String getAMRMServiceAddress() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasAMRMServiceAddress()) ? 
p.getAMRMServiceAddress() : null; } @Override public void setAMRMServiceAddress(String amRMServiceAddress) { maybeInitBuilder(); if (amRMServiceAddress == null) { builder.clearAMRMServiceAddress(); return; } builder.setAMRMServiceAddress(amRMServiceAddress); } @Override public String getClientRMServiceAddress() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasClientRMServiceAddress()) ? p.getClientRMServiceAddress() : null; } @Override public void setClientRMServiceAddress(String clientRMServiceAddress) { maybeInitBuilder(); if (clientRMServiceAddress == null) { builder.clearClientRMServiceAddress(); return; } builder.setClientRMServiceAddress(clientRMServiceAddress); } @Override public String getRMAdminServiceAddress() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasRMAdminServiceAddress()) ? p.getRMAdminServiceAddress() : null; } @Override public void setRMAdminServiceAddress(String rmAdminServiceAddress) { maybeInitBuilder(); if (rmAdminServiceAddress == null) { builder.clearRMAdminServiceAddress(); return; } builder.setRMAdminServiceAddress(rmAdminServiceAddress); } @Override public String getRMWebServiceAddress() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasRMWebServiceAddress()) ? p.getRMWebServiceAddress() : null; } @Override public void setRMWebServiceAddress(String rmWebServiceAddress) { maybeInitBuilder(); if (rmWebServiceAddress == null) { builder.clearRMWebServiceAddress(); return; } builder.setRMWebServiceAddress(rmWebServiceAddress); } @Override public long getLastHeartBeat() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return p.getLastHeartBeat(); } @Override public void setLastHeartBeat(long time) { maybeInitBuilder(); builder.setLastHeartBeat(time); } @Override public SubClusterState getState() { SubClusterInfoProtoOrBuilder p = viaProto ? 
proto : builder; if (!p.hasState()) { return null; } return convertFromProtoFormat(p.getState()); } @Override public void setState(SubClusterState state) { maybeInitBuilder(); if (state == null) { builder.clearState(); return; } builder.setState(convertToProtoFormat(state)); } @Override public long getLastStartTime() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasLastStartTime()) ? p.getLastStartTime() : 0; } @Override public void setLastStartTime(long lastStartTime) { Preconditions.checkNotNull(builder); builder.setLastStartTime(lastStartTime); } @Override public String getCapability() { SubClusterInfoProtoOrBuilder p = viaProto ? proto : builder; return (p.hasCapability()) ? p.getCapability() : null; } @Override public void setCapability(String capability) { maybeInitBuilder(); if (capability == null) { builder.clearCapability(); return; } builder.setCapability(capability); } private SubClusterId convertFromProtoFormat(SubClusterIdProto clusterId) { return new SubClusterIdPBImpl(clusterId); } private SubClusterIdProto convertToProtoFormat(SubClusterId clusterId) { return ((SubClusterIdPBImpl) clusterId).getProto(); } private SubClusterState convertFromProtoFormat(SubClusterStateProto state) { return SubClusterState.valueOf(state.name()); } private SubClusterStateProto convertToProtoFormat(SubClusterState state) { return SubClusterStateProto.valueOf(state.name()); } }
package com.intirix.openmm.server.mt.app; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.intirix.openmm.server.api.beans.Episode; import com.intirix.openmm.server.api.beans.EpisodeDetails; import com.intirix.openmm.server.api.beans.MediaLink; import com.intirix.openmm.server.api.beans.Season; import com.intirix.openmm.server.api.beans.SeasonDetails; import com.intirix.openmm.server.api.beans.Show; import com.intirix.openmm.server.mt.OpenMMMidtierException; import com.intirix.openmm.server.mt.technical.ShowMidtier; import com.intirix.openmm.server.mt.technical.beans.EpisodeLinkCounts; import com.intirix.openmm.server.mt.technical.beans.SeasonEpisodeCounts; import com.intirix.openmm.server.vfs.FileSystemBrowser; /** * This class is basically a wrapper around the technical layer. This class has to be in the * application layer to allow it to call the WebCacheApp midtier. 
This class also has some
 * handy helper methods.
 *
 * @author jeff
 */
public class ShowAppImpl implements ShowApp {

    private final Log log = LogFactory.getLog( ShowAppImpl.class );

    /**
     * Matches "&lt;season&gt;x&lt;episode&gt;" or "s&lt;season&gt;e&lt;episode&gt;" style
     * file names (the input is lower-cased before matching). Compiled once:
     * Pattern is immutable and thread-safe, so there is no need to recompile
     * it on every call to assignFilesInDirectory().
     */
    private static final Pattern EPISODE_FILE_PATTERN =
            Pattern.compile( ".*\\D([0-9]+)[ex]([0-9]+).*" );

    private ShowMidtier showMidtier;

    private WebCacheApp webCacheApp;

    private VFSApp vfsApp;

    public ShowMidtier getShowMidtier()
    {
        return showMidtier;
    }

    public void setShowMidtier( ShowMidtier showMidtier )
    {
        this.showMidtier = showMidtier;
    }

    public WebCacheApp getWebCacheApp()
    {
        return webCacheApp;
    }

    public void setWebCacheApp( WebCacheApp webCacheApp )
    {
        this.webCacheApp = webCacheApp;
    }

    public void setVFSApp( VFSApp vfsApp )
    {
        this.vfsApp = vfsApp;
    }

    public VFSApp getVFSApp()
    {
        return vfsApp;
    }

    public int addShow( Show show ) throws OpenMMMidtierException
    {
        return showMidtier.addShow( show );
    }

    /**
     * Lists all shows, returning clones whose banner paths have been rewritten
     * to point at the web cache.
     */
    public List< Show > listShows() throws OpenMMMidtierException
    {
        final List< Show > shows = showMidtier.listShows();
        final List< Show > ret = new ArrayList< Show >( shows.size() );
        for ( final Show show: shows )
        {
            try
            {
                final Show bean = (Show)show.clone();
                bean.setBannerPath( webCacheApp.getWebCacheUrl( bean.getBannerPath() ) );
                ret.add( bean );
            }
            catch ( CloneNotSupportedException e )
            {
                // Show is expected to be cloneable; skip the entry rather than
                // fail the whole listing.
            }
        }
        return ret;
    }

    /**
     * Returns a clone of the show with its banner path rewritten to the
     * web-cache URL.
     */
    public Show getShow( int id ) throws OpenMMMidtierException
    {
        try
        {
            final Show show = (Show)showMidtier.getShow( id ).clone();
            show.setBannerPath( webCacheApp.getWebCacheUrl( show.getBannerPath() ) );
            return show;
        }
        catch ( CloneNotSupportedException e )
        {
            throw new OpenMMMidtierException( e );
        }
    }

    public void updateShow( Show oldBean, Show newBean ) throws OpenMMMidtierException
    {
        showMidtier.updateShow( oldBean, newBean );
    }

    public int addSeason( Season season ) throws OpenMMMidtierException
    {
        return showMidtier.addSeason( season );
    }

    public List< Season > listSeasons( int showId ) throws OpenMMMidtierException
    {
        return showMidtier.listSeasons( showId );
    }

    /**
     * Lists the seasons of a show together with their episode counts.
     */
    public List< SeasonDetails > listSeasonDetails( int showId ) throws OpenMMMidtierException
    {
        final List< Season > seasons = listSeasons( showId );
        final List< SeasonEpisodeCounts > counts = showMidtier.listSeasonEpisodeCounts( showId );
        final List< SeasonDetails > details = new ArrayList< SeasonDetails >( seasons.size() );

        for ( final Season season: seasons )
        {
            final SeasonDetails detail = new SeasonDetails();
            detail.setSeason( season );

            // search for the counts object that matches the season
            // this is n-squared, but the cardinality is pretty low
            // so, o well
            for ( final SeasonEpisodeCounts sec: counts )
            {
                if ( sec.getSeasonId() == season.getId() )
                {
                    detail.setNumEpisodes( sec.getNumEpisodes() );
                    detail.setNumEpisodesAvailable( sec.getNumEpisodesAvailable() );
                }
            }

            details.add( detail );
        }

        return details;
    }

    /**
     * Finds a season of a show by its season number, or null if no such
     * season exists.
     */
    public Season getSeason( int showId, int seasonNumber ) throws OpenMMMidtierException
    {
        for ( final Season season: listSeasons( showId ) )
        {
            if ( season.getNumber() == seasonNumber )
            {
                return season;
            }
        }
        return null;
    }

    public Season getSeason( int seasonId ) throws OpenMMMidtierException
    {
        return showMidtier.getSeason( seasonId );
    }

    public void updateSeason( Season oldBean, Season newBean ) throws OpenMMMidtierException
    {
        showMidtier.updateSeason( oldBean, newBean );
    }

    public int addEpisode( Episode episode ) throws OpenMMMidtierException
    {
        return showMidtier.addEpisode( episode );
    }

    /**
     * Lists the episodes of a season, returning clones whose screenshot paths
     * have been rewritten to point at the web cache.
     */
    public List< Episode > listEpisodes( int seasonId ) throws OpenMMMidtierException
    {
        final List< Episode > episodes = showMidtier.listEpisodes( seasonId );
        final List< Episode > ret = new ArrayList< Episode >( episodes.size() );
        for ( final Episode episode: episodes )
        {
            try
            {
                final Episode bean = (Episode)episode.clone();

                // only check the webcache if we have a url
                // (also guard against a null path, which previously caused an NPE)
                final String screenshotPath = bean.getScreenshotPath();
                if ( screenshotPath != null && screenshotPath.length() > 0 )
                {
                    bean.setScreenshotPath( webCacheApp.getWebCacheUrl( screenshotPath ) );
                }
                ret.add( bean );
            }
            catch ( CloneNotSupportedException e )
            {
                // Episode is expected to be cloneable; skip the entry rather
                // than fail the whole listing.
            }
        }
        return ret;
    }

    /**
     * Lists the episodes of a season together with their link counts.
     */
    public List< EpisodeDetails > listEpisodeDetails( int seasonId ) throws OpenMMMidtierException
    {
        final List< Episode > episodes = showMidtier.listEpisodes( seasonId );
        final List< EpisodeLinkCounts > counts = showMidtier.listEpisodeLinkCounts( seasonId );
        final List< EpisodeDetails> details = new ArrayList< EpisodeDetails >( episodes.size() );

        for ( final Episode episode: episodes )
        {
            final EpisodeDetails detail = new EpisodeDetails();
            detail.setEpisode( episode );

            for ( final EpisodeLinkCounts elc: counts )
            {
                if ( elc.getEpisodeId() == episode.getId() )
                {
                    detail.setNumExternalLinks( elc.getNumExternalLinks() );
                    detail.setNumInternalLinks( elc.getNumInternalLinks() );
                }
            }

            details.add( detail );
        }

        return details;
    }

    /**
     * Finds an episode by show id, season number and episode number, or null
     * if no such episode exists.
     */
    public Episode getEpisode( int showId, int seasonNumber, int epNum ) throws OpenMMMidtierException
    {
        final Season season = getSeason( showId, seasonNumber );
        final List< Episode > episodes = listEpisodes( season.getId() );
        for ( final Episode episode: episodes )
        {
            if ( episode.getEpNum() == epNum )
            {
                return episode;
            }
        }
        return null;
    }

    public Episode getEpisode( int epid ) throws OpenMMMidtierException
    {
        Episode bean;
        try
        {
            bean = (Episode)showMidtier.getEpisode( epid ).clone();
        }
        catch ( CloneNotSupportedException e )
        {
            throw new OpenMMMidtierException( e );
        }
        // NOTE(review): unlike listEpisodes() this does not guard against an
        // empty screenshot path before consulting the web cache — confirm
        // getWebCacheUrl() tolerates empty input.
        bean.setScreenshotPath( webCacheApp.getWebCacheUrl( bean.getScreenshotPath() ) );
        return bean;
    }

    public EpisodeDetails getEpisodeDetails( int showId, int seasonNumber, int epNum ) throws OpenMMMidtierException
    {
        final EpisodeDetails details = new EpisodeDetails();
        details.setEpisode( getEpisode( showId, seasonNumber, epNum ) );
        details.setLinks( showMidtier.getEpisodeLinks( details.getEpisode().getId() ).toArray( new MediaLink[]{} ) );
        countLinks( details );
        return details;
    }

    public EpisodeDetails getEpisodeDetails( int epid ) throws OpenMMMidtierException
    {
        final EpisodeDetails details = new EpisodeDetails();
        details.setEpisode( getEpisode( epid ) );
        details.setLinks( showMidtier.getEpisodeLinks( epid ).toArray( new MediaLink[]{} ) );
        countLinks( details );
        return details;
    }

    // Tallies the available internal/external links on the details bean.
    private void countLinks( final EpisodeDetails details )
    {
        for ( final MediaLink link: details.getLinks() )
        {
            if ( link.isAvailable() )
            {
                if ( link.isInternal() )
                {
                    details.setNumInternalLinks( details.getNumInternalLinks() + 1 );
                }
                else
                {
                    details.setNumExternalLinks( details.getNumExternalLinks() + 1 );
                }
            }
        }
    }

    public void updateEpisode( Episode oldBean, Episode newBean ) throws OpenMMMidtierException
    {
        showMidtier.updateEpisode( oldBean, newBean );
    }

    public void watchEpisode( int episodeId ) throws OpenMMMidtierException
    {
        showMidtier.watchEpisode( episodeId );
    }

    public void assignFile( int episodeId, String file, long size ) throws OpenMMMidtierException
    {
        showMidtier.assignFile( episodeId, file, size );
    }

    /**
     * Scans a VFS folder and assigns every file whose name matches the
     * season/episode pattern to the corresponding episode of the show.
     *
     * @return the number of files successfully assigned
     */
    public int assignFilesInDirectory( int showId, String folder ) throws OpenMMMidtierException
    {
        final FileSystemBrowser vfs = vfsApp.getBrowser();
        int ret = 0;

        try
        {
            for ( final String file: vfs.listFiles( folder ) )
            {
                // Pattern.matcher() never returns null, so no null check is needed.
                final Matcher m = EPISODE_FILE_PATTERN.matcher( file.toLowerCase() );
                if ( m.matches() )
                {
                    final int season = Integer.parseInt( m.group( 1 ) );
                    final int episode = Integer.parseInt( m.group( 2 ) );
                    try
                    {
                        final Episode ep = getEpisode( showId, season, episode );
                        if ( ep == null )
                        {
                            log.debug( "Could not find episode " + season + 'x' + episode + " for " + file );
                        }
                        else
                        {
                            log.debug( "Assigning " + file + " to " + season + 'x' + ep.getEpNum() + " - " + ep.getName() );
                            assignFile( ep.getId(), folder + '/' + file, vfs.getFileLength( folder + '/' + file ) );
                            ret++;
                        }
                    }
                    catch ( Exception e )
                    {
                        // best effort: keep scanning the rest of the folder
                        log.warn( "Failed to assign s" + season + 'e' + episode, e );
                    }
                }
            }
        }
        catch ( IOException e )
        {
            throw new OpenMMMidtierException( "Failed to assign files", e );
        }
        return ret;
    }
}
/* * Copyright 2015-2016 Jean-Christophe Sirot <sirot@chelonix.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jenkinsci.plugins.ansible; import java.io.IOException; import java.util.List; import javax.annotation.Nonnull; import com.cloudbees.plugins.credentials.CredentialsProvider; import com.cloudbees.plugins.credentials.common.StandardCredentials; import com.cloudbees.plugins.credentials.common.StandardUsernameCredentials; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import hudson.AbortException; import hudson.EnvVars; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.Util; import hudson.model.Computer; import hudson.model.Node; import hudson.model.Run; import hudson.model.TaskListener; import hudson.tasks.BuildStepMonitor; import hudson.tasks.Builder; import hudson.util.FormValidation; import jenkins.tasks.SimpleBuildStep; import org.apache.commons.lang.StringUtils; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.DataBoundSetter; import org.kohsuke.stapler.QueryParameter; /** * A builder which wraps an Ansible playbook invocation. */ public class AnsiblePlaybookBuilder extends Builder implements SimpleBuildStep { public final String playbook; public final Inventory inventory; public String ansibleName = null; public String limit = null; public String tags = null; public String skippedTags = null; public String startAtTask = null; /** * The id of the credentials to use. 
*/ public String credentialsId = null; public String vaultCredentialsId = null; public boolean become = false; public String becomeUser = "root"; public boolean sudo = false; public String sudoUser = "root"; public int forks = 0; public boolean unbufferedOutput = true; public boolean colorizedOutput = false; public boolean disableHostKeyChecking = false; @Deprecated @SuppressWarnings("unused") @SuppressFBWarnings("URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD") public transient boolean hostKeyChecking = true; public String additionalParameters = null; public boolean copyCredentialsInWorkspace = false; public List<ExtraVar> extraVars; @Deprecated public AnsiblePlaybookBuilder(String ansibleName, String playbook, Inventory inventory, String limit, String tags, String skippedTags, String startAtTask, String credentialsId, boolean sudo, String sudoUser, int forks, boolean unbufferedOutput, boolean colorizedOutput, boolean hostKeyChecking, String additionalParameters) { this.ansibleName = ansibleName; this.playbook = playbook; this.inventory = inventory; this.limit = limit; this.tags = tags; this.skippedTags = skippedTags; this.startAtTask = startAtTask; this.credentialsId = credentialsId; this.sudo = sudo; this.sudoUser = sudoUser; this.forks = forks; this.unbufferedOutput = unbufferedOutput; this.colorizedOutput = colorizedOutput; //this.hostKeyChecking = hostKeyChecking; this.additionalParameters = additionalParameters; } @DataBoundConstructor public AnsiblePlaybookBuilder(String playbook, Inventory inventory) { this.playbook = playbook; this.inventory = inventory; } @DataBoundSetter public void setAnsibleName(String ansibleName) { this.ansibleName = ansibleName; } @DataBoundSetter public void setLimit(String limit) { this.limit = limit; } @DataBoundSetter public void setTags(String tags) { this.tags = tags; } @DataBoundSetter public void setSkippedTags(String skippedTags) { this.skippedTags = skippedTags; } @DataBoundSetter public void setStartAtTask(String startAtTask) { 
this.startAtTask = startAtTask; } @DataBoundSetter public void setCredentialsId(String credentialsId) { setCredentialsId(credentialsId, false); } public void setCredentialsId(String credentialsId, boolean copyCredentialsInWorkspace) { this.credentialsId = credentialsId; this.copyCredentialsInWorkspace = copyCredentialsInWorkspace; } @DataBoundSetter public void setVaultCredentialsId(String vaultCredentialsId) { this.vaultCredentialsId = vaultCredentialsId; } public void setBecome(boolean become) { this.become = become; } @DataBoundSetter public void setBecomeUser(String becomeUser) { this.becomeUser = becomeUser; } @DataBoundSetter public void setSudo(boolean sudo) { this.sudo = sudo; } @DataBoundSetter public void setSudoUser(String sudoUser) { this.sudoUser = sudoUser; } @DataBoundSetter public void setForks(int forks) { this.forks = forks; } @DataBoundSetter public void setUnbufferedOutput(boolean unbufferedOutput) { this.unbufferedOutput = unbufferedOutput; } @DataBoundSetter public void setColorizedOutput(boolean colorizedOutput) { this.colorizedOutput = colorizedOutput; } @DataBoundSetter public void setDisableHostKeyChecking(boolean disableHostKeyChecking) { this.disableHostKeyChecking = disableHostKeyChecking; } @DataBoundSetter @Deprecated public void setHostKeyChecking(boolean hostKeyChecking) { this.hostKeyChecking = true; } @DataBoundSetter public void setAdditionalParameters(String additionalParameters) { this.additionalParameters = additionalParameters; } @DataBoundSetter public void setExtraVars(List<ExtraVar> extraVars) { this.extraVars = extraVars; } @Override public void perform(@Nonnull Run<?, ?> run, @Nonnull FilePath ws, @Nonnull Launcher launcher, @Nonnull TaskListener listener) throws InterruptedException, IOException { Computer computer = ws.toComputer(); Node node; if (computer == null || (node = computer.getNode()) == null) { throw new AbortException("The ansible playbook build step requires to be launched on a node"); } perform(run, node, 
ws, launcher, listener, run.getEnvironment(listener)); } public void perform(@Nonnull Run<?, ?> run, @Nonnull Node node, @Nonnull FilePath ws, @Nonnull Launcher launcher, @Nonnull TaskListener listener, EnvVars envVars) throws InterruptedException, IOException { try { CLIRunner runner = new CLIRunner(run, ws, launcher, listener); String exe = AnsibleInstallation.getExecutable(ansibleName, AnsibleCommand.ANSIBLE_PLAYBOOK, node, listener, envVars); AnsiblePlaybookInvocation invocation = new AnsiblePlaybookInvocation(exe, run, ws, listener, envVars); invocation.setPlaybook(playbook); invocation.setInventory(inventory); invocation.setLimit(limit); invocation.setTags(tags); invocation.setSkippedTags(skippedTags); invocation.setStartTask(startAtTask); invocation.setBecome(become, becomeUser); invocation.setSudo(sudo, sudoUser); invocation.setForks(forks); invocation.setCredentials(StringUtils.isNotBlank(credentialsId) ? CredentialsProvider.findCredentialById(credentialsId, StandardUsernameCredentials.class, run) : null, copyCredentialsInWorkspace); invocation.setVaultCredentials(StringUtils.isNotBlank(vaultCredentialsId) ? 
CredentialsProvider.findCredentialById(vaultCredentialsId, StandardCredentials.class, run) : null); invocation.setExtraVars(extraVars); invocation.setAdditionalParameters(additionalParameters); invocation.setDisableHostKeyCheck(disableHostKeyChecking); invocation.setUnbufferedOutput(unbufferedOutput); invocation.setColorizedOutput(colorizedOutput); if (!invocation.execute(runner)) { throw new AbortException("Ansible playbook execution failed"); } } catch (IOException ioe) { Util.displayIOException(ioe, listener); ioe.printStackTrace(listener.fatalError(hudson.tasks.Messages.CommandInterpreter_CommandFailed())); throw ioe; } catch (AnsibleInvocationException aie) { listener.fatalError(aie.getMessage()); throw new AbortException(aie.getMessage()); } } @Override public BuildStepMonitor getRequiredMonitorService() { return BuildStepMonitor.NONE; } @Extension public static final class DescriptorImpl extends AbstractAnsibleBuilderDescriptor { public DescriptorImpl() { super("Invoke Ansible Playbook"); } public FormValidation doCheckPlaybook(@QueryParameter String playbook) { return checkNotNullOrEmpty(playbook, "Path to playbook must not be empty"); } } }
package kiwi.test.service;

import java.util.List;

import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;

import kiwi.api.reasoning.ReasoningService;
import kiwi.api.revision.RevisionService;
import kiwi.api.triplestore.TripleStore;
import kiwi.model.content.ContentItem;
import kiwi.model.kbase.KiWiTriple;
import kiwi.model.kbase.KiWiUriResource;
import kiwi.model.revision.CIVersion;
import kiwi.model.revision.MetadataUpdate;
import kiwi.model.revision.Revision;
import kiwi.test.base.KiWiTest;

import org.jboss.seam.Component;
import org.jboss.seam.log.Log;
import org.jboss.seam.log.Logging;
import org.testng.Assert;
import org.testng.annotations.Test;

/**
 * Integration test for metadata versioning: adds/removes triples and namespaces
 * across several transactions (each wrapped in a {@code FacesRequest}), then
 * restores earlier revisions via {@code RevisionService} and checks the
 * resulting triple/namespace state.
 *
 * NOTE(review): the test methods are order-dependent (TestNG dependsOnMethods)
 * and mutate the shared counters below between transactions.
 */
public class UpdateMetadataTest extends KiWiTest {

    // Baseline counts captured in init(); later sections add expected deltas.
    private Long countExistingTriples = new Long(0);
    private Long countExistingNamespaces = new Long(0);
    private Long countMetadataUpdates = new Long(0);
    private Long countRevisions = new Long(0);

    // Seam "currentContentItem", resolved in init(). Presumably unused afterwards
    // except as a session warm-up — TODO confirm.
    private ContentItem contentItem;

    /**
     * initializes currentContentItem and
     * counts for several entities
     * @throws Exception
     */
    @Test
    public void init() throws Exception {
        clearDatabase();

        /**
         * initialises the currentContentItem
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() throws Exception {
//              ReasoningService reasoningService = (ReasoningService) Component.getInstance("kiwi.core.reasoningService");
//              reasoningService.disableReasoning();
                // necessary????
                contentItem = (ContentItem) Component.getInstance("currentContentItem");
            }
        }.run();

        /**
         * initializes the variables:
         * countExistingTriples,
         * countExistingNamespaces,
         * countMetadataUpdates
         * countRevisions
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() throws Exception {
                EntityManager em = (EntityManager) Component.getInstance("entityManager");

                Query q1 = em.createQuery("select count(t) from KiWiTriple t where t.deleted=false");
                q1.setMaxResults(1);
                countExistingTriples = (Long) q1.getSingleResult();

                Query q2 = em.createQuery("select count(n) from KiWiNamespace n where n.deleted=false");
                q2.setMaxResults(1);
                countExistingNamespaces = (Long) q2.getSingleResult();

                Query q3 = em.createQuery("select count(mdu) from MetadataUpdate mdu");
                q3.setMaxResults(1);
                countMetadataUpdates = (Long) q3.getSingleResult();

                Query q4 = em.createQuery("select count(rev) from Revision rev");
                q4.setMaxResults(1);
                countRevisions = (Long) q4.getSingleResult();
            }
        }.run();
    }

    /** Re-enables reasoning after the revision test has finished. */
    @Test(dependsOnMethods = {"testRevision"})
    public void endOfTest() throws Exception {
        /**
         * initialises the currentContentItem
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() throws Exception {
                ReasoningService reasoningService =
                        (ReasoningService) Component.getInstance("kiwi.core.reasoningService");
                reasoningService.enableReasoning();
            }
        }.run();
    }

    /**
     * Main scenario: each FacesRequest below is one transaction. Sections are
     * numbered (1)-(10) in the log messages; assertions in section N check the
     * state committed by section N-1 (changes only become visible after the
     * transaction ends).
     */
    @Test(dependsOnMethods = { "init" })
    public void testRevision() throws Exception {

        /**
         * adds two triples and two namespaces
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                Log log = Logging.getLog(UpdateMetadataTest.class);
                log.info(">>>>>>>>>> Start of method testRevision() (1) ");
                TripleStore ts = (TripleStore) Component.getInstance("tripleStore");
                EntityManager em = (EntityManager) Component.getInstance("entityManager");

                log.info(">>>>>>>>>> testRevision() begin creating UriResources");
                KiWiUriResource subject1 = ts.createUriResource("http://www.example.org/subject");
                KiWiUriResource predicate1 = ts.createUriResource("http://www.example.org/predicate");
                KiWiUriResource object1 = ts.createUriResource("http://www.example.org/object");
                KiWiUriResource subject2 = ts.createUriResource("http://www.example.org/subject2");
                KiWiUriResource predicate2 = ts.createUriResource("http://www.example.org/predicate2");
                KiWiUriResource object2 = ts.createUriResource("http://www.example.org/object2");

                log.info(">>>>>>>>>> testRevision() begin creating Triples");
                ts.createTriple(subject1, predicate1, object1);
                ts.createTriple(subject2, predicate2, object2);
                ts.setNamespace("ex", "http://www.example.org");
                ts.setNamespace("ex2", "http://www.example2.org");

                log.info(">>>>>>>>>> testRevision() checking number of triples/namespaces with em queries");
                Query q1 = em.createQuery("select count(t) from KiWiTriple t where t.deleted=false");
                Query q2 = em.createQuery("select count(n) from KiWiNamespace n where n.deleted=false");
                q1.setMaxResults(1);
                q2.setMaxResults(1);
                // triples and namespaces should just be added after the transaction
                Assert.assertEquals(q1.getSingleResult(), countExistingTriples + 0L);
                Assert.assertEquals(q2.getSingleResult(), countExistingNamespaces + 0L);
            }
        }.run();

        /**
         * checks amount of triples and namespaces
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                EntityManager em = (EntityManager) Component.getInstance("entityManager");
                Log log = Logging.getLog(UpdateMetadataTest.class);
                log.info(">>>>>>>>>> testRevision() new transaction ended: " +
                        "checking number of triples/namespaces (2)");

                Query q1 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate' and t.deleted=false");
                Query q2 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate2' and t.deleted=false");
                Query q3 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example.org' and n.deleted=false");
                Query q4 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example2.org' and n.deleted=false");
                q1.setMaxResults(1);
                q2.setMaxResults(1);
                q3.setMaxResults(1);
                q4.setMaxResults(1);
                // triples and namespaces should just be added after the transaction
                Assert.assertEquals(q1.getSingleResult(), 1L);
                Assert.assertEquals(q2.getSingleResult(), 1L);
                Assert.assertEquals(q3.getSingleResult(), 1L);
                Assert.assertEquals(q4.getSingleResult(), 1L);

                /*countMetadataUpdates must be increased by 2, because now we got 2 MetadataUpdates per Revision
                 * (-> 2 diverse ContentItem Versions)*/
                countMetadataUpdates = countMetadataUpdates + 2;
                countRevisions = countRevisions + 1;
                log.info(">>>>>>>>>> testRevision() end of 'checking number of triples/namespaces'");
            }
        }.run();

        /**
         * adds two other triples and namespaces and deleted one triple
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                TripleStore ts = (TripleStore) Component.getInstance("tripleStore");
                EntityManager em = (EntityManager) Component.getInstance("entityManager");
                Log log = Logging.getLog(UpdateMetadataTest.class);
                log.info(">>>>>>>>>> testRevision() new transaction ended: " +
                        "creating more triples/namespaces (3)");

                KiWiUriResource subject1 = ts.createUriResource("http://www.example.org/subject3");
                KiWiUriResource predicate1 = ts.createUriResource("http://www.example.org/predicate3");
                KiWiUriResource object1 = ts.createUriResource("http://www.example.org/object3");

                // Remove the predicate2 triple created in section (1).
                Query q1 = em.createQuery("select t from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate2' and t.deleted=false");
                q1.setMaxResults(1);
                ts.removeTriple((KiWiTriple) q1.getSingleResult());

                // Re-create a predicate2 triple, now pointing at object4.
                KiWiUriResource subject2 = ts.createUriResource("http://www.example.org/subject2");
                KiWiUriResource predicate2 = ts.createUriResource("http://www.example.org/predicate2");
                KiWiUriResource object2 = ts.createUriResource("http://www.example.org/object4");

                ts.createTriple(subject1, predicate1, object1);
                ts.createTriple(subject2, predicate2, object2);
                ts.setNamespace("ex3", "http://www.example3.org");
                ts.setNamespace("ex4", "http://www.example4.org");
                log.info(">>>>>>>>>> testRevision() end of creating more triples/namespaces");
            }
        }.run();

        /**
         * checks amount of triples and namespaces
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                EntityManager em = (EntityManager) Component.getInstance("entityManager");
                Log log = Logging.getLog(UpdateMetadataTest.class);
                log.info(">>>>>>>>>> testRevision() () new transaction ended: " +
                        "counting triples/namespaces (4)");

                Query q1 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate' and t.deleted=false");
                Query q2 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate3' and t.deleted=false");
                Query q3 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate2' and t.deleted=false");
                Query q4 = em.createQuery("select count(t) from KiWiTriple t where t.object.uri = 'http://www.example.org/object4' and t.deleted=false");
                Query q5 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example.org' and n.deleted=false");
                Query q6 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example2.org' and n.deleted=false");
                Query q7 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example3.org' and n.deleted=false");
                Query q8 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example4.org' and n.deleted=false");
                q1.setMaxResults(1);
                q2.setMaxResults(1);
                q3.setMaxResults(1);
                q4.setMaxResults(1);
                q5.setMaxResults(1);
                q6.setMaxResults(1);
                q7.setMaxResults(1);
                q8.setMaxResults(1);
                // triples and namespaces should just be added after the transaction
                Assert.assertEquals(q1.getSingleResult(), 1L);
                Assert.assertEquals(q2.getSingleResult(), 1L);
                Assert.assertEquals(q3.getSingleResult(), 1L);
                Assert.assertEquals(q4.getSingleResult(), 1L);
                Assert.assertEquals(q5.getSingleResult(), 1L);
                Assert.assertEquals(q6.getSingleResult(), 1L);
                Assert.assertEquals(q7.getSingleResult(), 1L);
                Assert.assertEquals(q8.getSingleResult(), 1L);

                /* again increase by 2, because two diverse CI Versions (each holding a MDU) were created */
                countMetadataUpdates = countMetadataUpdates + 2;
                countRevisions = countRevisions + 1;
                log.info(">>>>>>>>>> testRevision() end of counting triples/namespaces");
            }
        }.run();

        /**
         * checks amount of current MetadataUpdate triples and calls restore() on first Revision
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                Log log = Logging.getLog(UpdateMetadataTest.class);
                log.info(">>>>>>>>>> testRevision() () (5)");
                EntityManager em = (EntityManager) Component.getInstance("entityManager");
                log.info("###### entitymanager created #0 ", em);
                RevisionService revS = (RevisionService) Component.getInstance("revisionService");
                log.info("###### revisionservice created #0 ", revS);

                Query q = em.createQuery("select t from KiWiTriple t " +
                        "where t.property.uri = " +
                        "'http://www.example.org/predicate' and t.deleted=false");
                log.info(">>>>>>>>>> testRevision() () (5) query");
                KiWiTriple t = null;
                try {
                    t = (KiWiTriple) q.getSingleResult();
                } catch (NoResultException e) {
                    e.printStackTrace();
                }
                Assert.assertNotNull(t);
                log.info(">>>>>>>>>> testRevision() () (5) t not null");

                // Navigate to the latest revision of the triple's subject content item.
                List<CIVersion> versions = t.getSubject().getContentItem().getVersions();
                log.info(">>>>>>>>>> testRevision() () (5) t get civersions");
                int versionCount = versions.size();
                CIVersion getLastVersion = versions.get(versionCount - 1);
                log.info(">>>>>>>>>> testRevision() () (5) t get last version");
                Revision rev = getLastVersion.getRevision();
                log.info(">>>>>>>>>> testRevision() () (5) t get rev");
//              Revision rev = em.find(Revision.class, (countRevisions-1));
                log.info("###### Revision queried #0 ", rev);
                Assert.assertNotNull(rev);
                log.info("###### Revision query ci versions");
                Assert.assertNotNull(rev.getContentItemVersions());
                log.info("###### Revision query ci versions size");
                Assert.assertEquals(2L, rev.getContentItemVersions().size());

                // Expect only "added" metadata updates in this revision, no removals.
                boolean checked_add_rem = false;
                boolean checked_add = false;
                for (CIVersion civ : rev.getContentItemVersions()) {
                    log.info("###### Ensure that metadataupdate != null");
                    Assert.assertNotNull(civ.getMetadataUpdate());
                    MetadataUpdate mdu = civ.getMetadataUpdate();
                    if (mdu.getRemovedTriples() != null && mdu.getRemovedTriples().size() != 0) {
                        checked_add_rem = true;
                    } else {
                        log.info("###### Metadataupdate - added #0 triples", mdu.getAddedTriples().size());
                        Assert.assertEquals(mdu.getAddedTriples().size(), 1L);
                        checked_add = true;
                    }
                }
                Assert.assertTrue(checked_add);
                log.info("###### Added triples have been checked ");
                Assert.assertTrue(!checked_add_rem);
                log.info("###### Before restoring revision with id #0 ", (countRevisions - 1));
//              Query q2 = em.createQuery("select r from Revision r where r.id='"+(countRevisions-1)+"'");
//              q2.setMaxResults(1);
//              log.info("before query");
//              Revision r = (Revision) q2.getSingleResult();
//              log.info("after query, before restore");
                revS.restore(rev);
                log.info("after restore");
            }
        }.run();

        /**
         * checks amount of triples and namespaces
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                Log log = Logging.getLog(UpdateMetadataTest.class);
                log.info(">>>>>>>>>> testRevision() () (6)");
                EntityManager em = (EntityManager) Component.getInstance("entityManager");

                Query q1 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate' and t.deleted=false");
                Query q2 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate2' and t.deleted=false");
                Query q3 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example.org' and n.deleted=false");
                Query q4 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example2.org' and n.deleted=false");
                q1.setMaxResults(1);
                q2.setMaxResults(1);
                q3.setMaxResults(1);
                q4.setMaxResults(1);
                // triples and namespaces should just be added after the transaction
                Assert.assertEquals(q1.getSingleResult(), 1L);
                Assert.assertEquals(q2.getSingleResult(), 1L);
                Assert.assertEquals(q3.getSingleResult(), 1L);
                Assert.assertEquals(q4.getSingleResult(), 1L);

                countRevisions = countRevisions + 1;
                countMetadataUpdates = countMetadataUpdates + 2;
            }
        }.run();

        /**
         * adds two other triples and namespaces and deleted one triple
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                Log log = Logging.getLog(UpdateMetadataTest.class);
                log.info(">>>>>>>>>> testRevision() () (7)");
                TripleStore ts = (TripleStore) Component.getInstance("tripleStore");
                EntityManager em = (EntityManager) Component.getInstance("entityManager");

                KiWiUriResource subject1 = ts.createUriResource("http://www.example.org/subject5");
                KiWiUriResource predicate1 = ts.createUriResource("http://www.example.org/predicate5");
                KiWiUriResource object1 = ts.createUriResource("http://www.example.org/object5");

                Query q1 = em.createQuery("select t from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate2' and t.deleted=false");
                q1.setMaxResults(1);
                ts.removeTriple((KiWiTriple) q1.getSingleResult());

                KiWiUriResource subject2 = ts.createUriResource("http://www.example.org/subject6");
                KiWiUriResource predicate2 = ts.createUriResource("http://www.example.org/predicate6");
                KiWiUriResource object2 = ts.createUriResource("http://www.example.org/object6");

                ts.createTriple(subject1, predicate1, object1);
                ts.createTriple(subject2, predicate2, object2);
                ts.removeNamespace("ex2");
                ts.setNamespace("ex5", "http://www.example5.org");
                ts.setNamespace("ex6", "http://www.example6.org");
            }
        }.run();

        /**
         * checks amount of triples and namespaces
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                Log log = Logging.getLog(UpdateMetadataTest.class);
                log.info(">>>>>>>>>> testRevision() () (8)");
                EntityManager em = (EntityManager) Component.getInstance("entityManager");

                Query q1 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate' and t.deleted=false");
                Query q2 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate2' and t.deleted=false");
                Query q3 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate5' and t.deleted=false");
                Query q4 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate6' and t.deleted=false");
                Query q5 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example.org' and n.deleted=false");
                Query q6 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example2.org' and n.deleted=false");
                Query q7 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example5.org' and n.deleted=false");
                Query q8 = em.createQuery("select count(n) from KiWiNamespace n where n.uri = 'http://www.example6.org' and n.deleted=false");
                q1.setMaxResults(1);
                q2.setMaxResults(1);
                q3.setMaxResults(1);
                q4.setMaxResults(1);
                q5.setMaxResults(1);
                q6.setMaxResults(1);
                q7.setMaxResults(1);
                q8.setMaxResults(1);
                // triples and namespaces should just be added after the transaction;
                // predicate2 triple and namespace ex2 were removed in section (7).
                Assert.assertEquals(q1.getSingleResult(), 1L);
                Assert.assertEquals(q2.getSingleResult(), 0L);
                Assert.assertEquals(q3.getSingleResult(), 1L);
                Assert.assertEquals(q4.getSingleResult(), 1L);
                Assert.assertEquals(q5.getSingleResult(), 1L);
                Assert.assertEquals(q6.getSingleResult(), 0L);
                Assert.assertEquals(q7.getSingleResult(), 1L);
                Assert.assertEquals(q8.getSingleResult(), 1L);

                countRevisions = countRevisions + 1;
                countMetadataUpdates = countMetadataUpdates + 3;
            }
        }.run();

        /**
         * checks amount of current MetadataUpdate triples and calls restore() on first MetadataUpdate
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                Log log = Logging.getLog(UpdateMetadataTest.class);
                log.info(">>>>>>>>>> testRevision() () (9)");
                EntityManager em = (EntityManager) Component.getInstance("entityManager");
                log.info(">>>>>>>>>> testRevision() (9) initialised EM");
                RevisionService revS = (RevisionService) Component.getInstance("revisionService");
                log.info(">>>>>>>>>> testRevision() (9) initialised RevisionService");

                Query q = em.createQuery("select t from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate' and t.deleted=false");
                KiWiTriple t = null;
                try {
                    t = (KiWiTriple) q.getSingleResult();
                } catch (NoResultException e) {
                    e.printStackTrace();
                }
                Assert.assertNotNull(t);

                List<CIVersion> versions = t.getSubject().getContentItem().getVersions();
                int versionCount = versions.size();
                CIVersion getLastVersion = versions.get(versionCount - 1);
                Revision rev = getLastVersion.getRevision();
//              Revision rev = em.find(Revision.class, countRevisions-3);
                log.info("###### Revision queried #0 ", rev);
                Assert.assertNotNull(rev);
                log.info("###### Revision query ci versions");
                Assert.assertNotNull(rev.getContentItemVersions());
                log.info("###### Revision query ci versions size");
                Assert.assertEquals(rev.getContentItemVersions().size(), 2L);

                boolean checked_add_rem = false;
                boolean checked_add = false;
                for (CIVersion civ : rev.getContentItemVersions()) {
                    log.info("###### Ensure that metadataupdate != null");
                    Assert.assertNotNull(civ.getMetadataUpdate());
                    MetadataUpdate mdu = civ.getMetadataUpdate();
                    if (mdu.getRemovedTriples() != null && mdu.getRemovedTriples().size() != 0) {
                        checked_add_rem = true;
                    } else {
                        log.info("###### Metadataupdate - added #0 triples", mdu.getAddedTriples().size());
                        Assert.assertEquals(mdu.getAddedTriples().size(), 1L);
                        checked_add = true;
                    }
                }
                Assert.assertTrue(checked_add);
                Assert.assertTrue(!checked_add_rem);
                revS.restore(rev);
                log.info(">>>>>>>>>> testRevision() END OF (9)");
            }
        }.run();

        /**
         * checks amount of triples and namespaces
         */
        new FacesRequest() {
            @Override
            protected void invokeApplication() {
                Log log = Logging.getLog(UpdateMetadataTest.class);
                log.info(">>>>>>>>>> testRevision() () (10)");
                EntityManager em = (EntityManager) Component.getInstance("entityManager");

                Query q1 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate' and t.deleted=false");
                Query q2 = em.createQuery("select count(t) from KiWiTriple t where t.property.uri = 'http://www.example.org/predicate2' and t.deleted=false");
                q1.setMaxResults(1);
                q2.setMaxResults(1);
                // triples and namespaces should just be added after the transaction
                Assert.assertEquals(q1.getSingleResult(), 1L);
                Assert.assertEquals(q2.getSingleResult(), 1L);

                countMetadataUpdates = countMetadataUpdates + 3;
                log.info(">>>>>>>>>> testRevision() END OF (10)");
            }
        }.run();
    }
}
package com.chinamobile.bcbsp.workermanager;

import static org.junit.Assert.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.net.NetUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

import com.chinamobile.bcbsp.BSPConfiguration;
import com.chinamobile.bcbsp.Constants;
import com.chinamobile.bcbsp.TestUtil;
import com.chinamobile.bcbsp.action.Directive;
import com.chinamobile.bcbsp.action.LaunchStaffAction;
import com.chinamobile.bcbsp.bspcontroller.Counters;
import com.chinamobile.bcbsp.bspstaff.Staff;
import com.chinamobile.bcbsp.controllerProtocol.ControllerProtocolServer;
import com.chinamobile.bcbsp.fault.storage.Fault;
import com.chinamobile.bcbsp.rpc.ControllerProtocol;
import com.chinamobile.bcbsp.sync.SuperStepReportContainer;
import com.chinamobile.bcbsp.sync.WorkerSSControllerInterface;
import com.chinamobile.bcbsp.thirdPartyInterface.Zookeeper.BSPIds;
import com.chinamobile.bcbsp.thirdPartyInterface.Zookeeper.BSPZookeeper;
import com.chinamobile.bcbsp.thirdPartyInterface.Zookeeper.impl.BSPCreateModeImpl;
import com.chinamobile.bcbsp.util.BSPJob;
import com.chinamobile.bcbsp.util.BSPJobID;
import com.chinamobile.bcbsp.util.StaffAttemptID;
import com.chinamobile.bcbsp.util.StaffStatus;
import com.chinamobile.bcbsp.workermanager.WorkerManager.RunningJob;
import com.chinamobile.bcbsp.workermanager.WorkerManager.StaffInProgress;

/**
 * JUnit tests for {@link WorkerManager}: configuration, local-file cleanup,
 * staff bookkeeping, and delegation to per-job {@code WorkerAgentForJob}s.
 *
 * NOTE(review): several tests touch absolute paths under /root/usr/test and
 * hard-coded addresses/ports — they assume a specific test environment.
 */
public class WorkerManagerTest {

    // Shared fixture, rebuilt in setUp() before every test.
    WorkerManager wm = null;
    BSPConfiguration conf = null;

    /** Builds a WorkerManager on a canned controller/ZooKeeper configuration. */
    @Before
    public void setUp() throws Exception {
        conf = new BSPConfiguration();
        conf.set(Constants.BC_BSP_CONTROLLER_ADDRESS, "192.168.1.2");
        conf.set(Constants.ZOOKEEPER_QUORUM, "192.168.1.3");
        conf.set(Constants.ZOOKEPER_CLIENT_PORT, "2181");
        conf.set(Constants.BSPCONTROLLER_STANDBY_LEADER, "/leader/");
        wm = new WorkerManager(conf);
    }

    /** Constructor should retain the configuration it was given. */
    @Test
    public void testWorkerManager() throws IOException {
        WorkerManager wmtest = new WorkerManager(conf);
        String port = wmtest.getConf().get(Constants.ZOOKEPER_CLIENT_PORT);
        assertEquals("2181", port);
    }

    /** dispatch() on an empty directive is expected to succeed. */
    @Test
    public void testDispatch() throws IOException {
        BSPJobID jobID = new BSPJobID();
        Directive directive = new Directive();
        assertEquals(true, wm.dispatch(jobID, directive, true, true, 0));
    }

    /** getLocalDirs() mirrors the configured local-directory list. */
    @Test
    public void testGetLocalDirs() throws IOException {
        WorkerManager wm1 = new WorkerManager(conf);
        assertEquals(conf.getStrings(Constants.BC_BSP_LOCAL_DIRECTORY), wm1.getLocalDirs());
    }

    /** deleteLocalFiles() removes the configured local file. */
    @Test
    public void testDeleteLocalFiles() throws IOException {
        File path = new File("/root/usr/test");
        File dir = new File(path, "hello.txt");
        conf.set(Constants.BC_BSP_LOCAL_DIRECTORY, "/root/usr/test/hello.txt");
        WorkerManager wm = new WorkerManager(conf);
        wm.deleteLocalFiles();
        assertEquals(false, dir.exists());
    }

    /** deleteLocalFiles() also removes a configured directory. */
    @Test
    public void testDeleteLocalDir() throws IOException {
        File path = new File("/root/usr/test");
        conf.set(Constants.BC_BSP_LOCAL_DIRECTORY, "/root/usr/test");
        wm.deleteLocalFiles();
        assertEquals(false, path.exists());
    }

    /** deleteLocalFiles(String) removes the named file. */
    @Test
    public void testDeleteLocalFilesString() throws IOException {
        File path = new File("/root/usr/test");
        File dir = new File(path, "hello.txt");
        conf.set(Constants.BC_BSP_LOCAL_DIRECTORY, "/root/usr/test/hello.txt");
        WorkerManager wm = new WorkerManager(conf);
        String[] localDirs = conf.getStrings(Constants.BC_BSP_LOCAL_DIRECTORY);
        wm.deleteLocalFiles("/root/usr/test/hello.txt");
        assertEquals(false, dir.exists());
    }

    /** cleanupStorage() clears the configured local storage. */
    @Test
    public void testCleanupStorage() throws IOException {
        File path = new File("/root/usr/test/");
        File dir = new File(path, "hello.txt");
        conf.set(Constants.BC_BSP_LOCAL_DIRECTORY, "/root/usr/test/hello.txt");
        WorkerManager wm = new WorkerManager(conf);
        String[] localDirs = conf.getStrings(Constants.BC_BSP_LOCAL_DIRECTORY);
        wm.cleanupStorage();
        assertEquals(false, dir.exists());
    }

    /**
     * updateStaffStatistics(job) should move one staff from "current" to
     * "finished" and decrement the job's staff counter.
     */
    @Test
    public void testUpdateStaffStatistics() throws Exception {
        wm.setCurrentStaffsCount(2);
        wm.setFinishedStaffsCount(2);
        Map<StaffAttemptID, WorkerManager.StaffInProgress> finishedStaffs =
                new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>();
        Map<BSPJobID, RunningJob> runningJobs = new ConcurrentHashMap<BSPJobID, RunningJob>();
        Map<BSPJobID, RunningJob> finishedJobs = new ConcurrentHashMap<BSPJobID, RunningJob>();
        Map<BSPJobID, WorkerAgentForJob> runningJobtoWorkerAgent =
                new ConcurrentHashMap<BSPJobID, WorkerAgentForJob>();
        BSPJobID bspJobId1 = new BSPJobID();
        BSPJobID bspJobId2 = new BSPJobID();
        RunningJob rJob1 = new RunningJob();
        RunningJob rJob2 = new RunningJob();
        rJob1.setStaffCounter(2);
        rJob2.setStaffCounter(2);
        StaffAttemptID staffID1_1 = new StaffAttemptID(bspJobId1.getJtIdentifier(), bspJobId1.getId(), 0, 0);
        StaffAttemptID staffID1_2 = new StaffAttemptID(bspJobId1.getJtIdentifier(), bspJobId1.getId(), 0, 0);
        StaffAttemptID staffID2_1 = new StaffAttemptID(bspJobId2.getJtIdentifier(), bspJobId1.getId(), 0, 0);
        StaffAttemptID staffID2_2 = new StaffAttemptID(bspJobId2.getJtIdentifier(), bspJobId1.getId(), 0, 0);
        WorkerManager.StaffInProgress sip1 = wm.new StaffInProgress();
        WorkerManager.StaffInProgress sip2 = wm.new StaffInProgress();
        finishedStaffs.put(staffID1_1, sip1);
        finishedStaffs.put(staffID1_2, sip2);
        wm.setFinishedStaffs(finishedStaffs);
        runningJobs.put(bspJobId1, rJob1);
        wm.setRunningJobs(runningJobs);
        WorkerAgentForJob wafj = new WorkerAgentForJob(
                TestUtil.createDonothingObject(WorkerSSControllerInterface.class));
        BSPJobID jobID = new BSPJobID();
        StaffAttemptID staffID = new StaffAttemptID(jobID.getJtIdentifier(), jobID.getId(), 0, 0);
        runningJobtoWorkerAgent.put(bspJobId2, wafj);
        wm.setRunningJobtoWorkerAgent(runningJobtoWorkerAgent);
        wm.updateStaffStatistics(bspJobId2);
        assertEquals(1, (int) wm.getCurrentStaffsCount());
        assertEquals(3, (int) wm.getFinishedStaffsCount());
        assertEquals(1, (int) wm.getRunningJobs().get(bspJobId2).getStaffCounter());
    }

//    @Test
//    public void testIsRunning() {
//        Boolean running = true;
//        assertEquals(true, wm.isRunning());
//    }

    /** getStaff() returns the Staff held by the registered StaffInProgress. */
    @Test
    public void testGetStaff() throws IOException {
        Map<StaffAttemptID, WorkerManager.StaffInProgress> runningStaffs =
                new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>();
        String staffId_one = "attempt_201207241653_0001_000001_0";
        StaffAttemptID staffid_one = new StaffAttemptID().forName(staffId_one);
        LaunchStaffAction las = new LaunchStaffAction();
        Staff stf = las.getStaff();
        BSPJob bspjob = new BSPJob(conf, 2);
        String workerManagerName = "slave1";
        WorkerManager.StaffInProgress sip = wm.new StaffInProgress();
        runningStaffs.put(staffid_one, sip);
        wm.setRunningStaffs(runningStaffs);
        assertEquals(stf, wm.getStaff(staffid_one));
    }

    /** Recovery flag of a freshly registered staff defaults to false. */
    @Test
    public void testGetStaffRecoveryState() {
        Map<StaffAttemptID, WorkerManager.StaffInProgress> runningStaffs =
                new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>();
        String staffId_one = "attempt_201207241653_0001_000001_0";
        StaffAttemptID staffid_one = new StaffAttemptID().forName(staffId_one);
        WorkerManager.StaffInProgress sip = wm.new StaffInProgress();
        Fault fault = new Fault();
        runningStaffs.put(staffid_one, sip);
        runningStaffs.get(staffid_one).setStaffStatus(1, fault);
        wm.setRunningStaffs(runningStaffs);
        assertEquals(false, wm.getStaffRecoveryState(staffid_one));
    }

    /** Change-worker flag of a freshly registered staff defaults to false. */
    @Test
    public void testGetStaffChangeWorkerState() {
        Map<StaffAttemptID, WorkerManager.StaffInProgress> runningStaffs =
                new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>();
        String staffId_one = "attempt_201207241653_0001_000001_0";
        StaffAttemptID staffid_one = new StaffAttemptID().forName(staffId_one);
        WorkerManager.StaffInProgress sip = wm.new StaffInProgress();
        runningStaffs.put(staffid_one, sip);
        wm.setRunningStaffs(runningStaffs);
        assertEquals(false, wm.getStaffChangeWorkerState(staffid_one));
    }

    /** Fail counter starts at 0 and tracks StaffInProgress.setFailCounter. */
    @Test
    public void testGetFailCounter() {
        Map<StaffAttemptID, WorkerManager.StaffInProgress> runningStaffs =
                new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>();
        String staffId_one = "attempt_201207241653_0001_000001_0";
        StaffAttemptID staffid_one = new StaffAttemptID().forName(staffId_one);
        WorkerManager.StaffInProgress sip = wm.new StaffInProgress();
        runningStaffs.put(staffid_one, sip);
        wm.setRunningStaffs(runningStaffs);
        assertEquals(0, wm.getFailCounter(staffid_one));
        sip.setFailCounter(2);
        assertEquals(2, wm.getFailCounter(staffid_one));
    }

    /** getWorkerManagerName(job, staff) reads the name via the job's agent. */
    @Test
    public void testGetWorkerManagerNameBSPJobIDStaffAttemptID() {
        WorkerAgentForJob wafj = new WorkerAgentForJob(
                TestUtil.createDonothingObject(WorkerSSControllerInterface.class));
        Map<BSPJobID, WorkerAgentForJob> runningJobs =
                new ConcurrentHashMap<BSPJobID, WorkerAgentForJob>();
        try {
            // Reflection helpers: inject the agent map and the agent's name.
            TestUtil.set(wm, "runningJobtoWorkerAgent", runningJobs);
            TestUtil.set(wafj, "workerManagerName", "Test");
        } catch (Exception e) {
            e.printStackTrace();
        }
        BSPJobID jobID = new BSPJobID();
        StaffAttemptID staffID = new StaffAttemptID(jobID.getJtIdentifier(), jobID.getId(), 0, 0);
        runningJobs.put(jobID, wafj);
        String workerManagerName = wm.getWorkerManagerName(jobID, staffID);
        assertEquals(true, "Test".equals(workerManagerName));
    }

    /** addCounters(job, counters) forwards the counters to the job's agent. */
    @Test
    public void testAddCounters() {
        // need to change
        Counters counters = new Counters();
        WorkerAgentForJob wafj = new WorkerAgentForJob(
                TestUtil.createDonothingObject(WorkerSSControllerInterface.class));
        BSPJobID jobID = new BSPJobID();
        StaffAttemptID staffID = new StaffAttemptID(jobID.getJtIdentifier(), jobID.getId(), 0, 0);
        wafj.setNumberWorkers(jobID, staffID, 2);
        Map<BSPJobID, WorkerAgentForJob> runningJobstoWorkerAgent =
                new ConcurrentHashMap<BSPJobID, WorkerAgentForJob>();
        runningJobstoWorkerAgent.put(jobID, wafj);
        wm.setRunningJobtoWorkerAgent(runningJobstoWorkerAgent);
        wm.addCounters(jobID, counters);
        assertEquals(true, counters.equals(wafj.getCounters()));
    }

    /** getNumberWorkers(job, staff) reads the count set on the agent. */
    @Test
    public void testGetNumberWorkers() {
        WorkerAgentForJob wafj = new WorkerAgentForJob(
                TestUtil.createDonothingObject(WorkerSSControllerInterface.class));
        BSPJobID jobID = new BSPJobID();
        StaffAttemptID staffID = new StaffAttemptID(jobID.getJtIdentifier(), jobID.getId(), 0, 0);
        wafj.setNumberWorkers(jobID, staffID, 2);
        Map<BSPJobID, WorkerAgentForJob> runningJobs =
                new ConcurrentHashMap<BSPJobID, WorkerAgentForJob>();
        runningJobs.put(jobID, wafj);
        try {
            TestUtil.set(wm, "runningJobtoWorkerAgent", runningJobs);
        } catch (Exception e) {
            e.printStackTrace();
        }
        int num = wm.getNumberWorkers(jobID, staffID);
        assertEquals(2, num);
    }

    /** setNumberWorkers(job, staff, n) updates the agent-held count. */
    @Test
    public void testSetNumberWorkers() {
        WorkerAgentForJob wafj = new WorkerAgentForJob(
                TestUtil.createDonothingObject(WorkerSSControllerInterface.class));
        BSPJobID jobID = new BSPJobID();
        StaffAttemptID staffID = new StaffAttemptID(jobID.getJtIdentifier(), jobID.getId(), 0, 0);
        wafj.setNumberWorkers(jobID, staffID, 0);
        Map<BSPJobID, WorkerAgentForJob> runningJobs =
                new ConcurrentHashMap<BSPJobID, WorkerAgentForJob>();
        runningJobs.put(jobID, wafj);
        try {
            TestUtil.set(wm, "runningJobtoWorkerAgent", runningJobs);
        } catch (Exception e) {
            e.printStackTrace();
        }
        int num = wm.getNumberWorkers(jobID, staffID);
        assertEquals(0, num);
        wm.setNumberWorkers(jobID, staffID, 2);
        num = wm.getNumberWorkers(jobID, staffID);
        assertEquals(2, num);
    }

    /** addStaffReportCounter(job) increments the agent's report counter. */
    @Test
    public void testAddStaffReportCounter() {
        WorkerAgentForJob wafj = new WorkerAgentForJob(
                TestUtil.createDonothingObject(WorkerSSControllerInterface.class));
        BSPJobID jobID = new BSPJobID();
        @SuppressWarnings("unused")
        StaffAttemptID staffID = new StaffAttemptID(jobID.getJtIdentifier(), jobID.getId(), 0, 0);
        Map<BSPJobID, WorkerAgentForJob> runningJobs =
                new ConcurrentHashMap<BSPJobID, WorkerAgentForJob>();
        runningJobs.put(jobID, wafj);
        wm.setRunningJobtoWorkerAgent(runningJobs);
        wafj.setStaffReportCounter(2);
        wm.addStaffReportCounter(jobID);
        assertEquals(3, (int) wafj.getStaffReportCounter());
    }

    /** setStaffStatus(..., 1, ...) should mark the staff RUNNING. */
    @Test
    public void testSetStaffStatus() {
        Map<StaffAttemptID, WorkerManager.StaffInProgress> runningStaffs =
                new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>();
        String staffId_one = "attempt_201207241653_0001_000001_0";
        StaffAttemptID staffid_one = new StaffAttemptID().forName(staffId_one);
        WorkerManager.StaffInProgress sip = wm.new StaffInProgress();
        Fault fault = new Fault();
        runningStaffs.put(staffid_one, sip);
        wm.setRunningStaffs(runningStaffs);
        wm.setStaffStatus(staffid_one, 1, fault, 1);
        assertEquals(
                true,
                "RUNNING".equals(wm.getRunningStaffs().get(staffid_one).getStatus()
                        .getRunState().toString()));
    }

    /** getStaffStatus() returns the StaffStatus stored for the attempt. */
    @Test
    public void testGetStaffStatus() {
        Map<StaffAttemptID, WorkerManager.StaffInProgress> runningStaffs =
                new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>();
        String staffId_one = "attempt_201207241653_0001_000001_0";
        StaffAttemptID staffid_one = new StaffAttemptID().forName(staffId_one);
        WorkerManager.StaffInProgress sip = wm.new StaffInProgress();
        BSPJobID jobID = new BSPJobID();
        StaffStatus stfs = new StaffStatus(jobID, staffid_one, 0,
                StaffStatus.State.UNASSIGNED, "running", "slave1", StaffStatus.Phase.STARTING);
        sip.setStaffStatus(stfs);
        runningStaffs.put(staffid_one, sip);
        wm.setRunningStaffs(runningStaffs);
        String state = wm.getStaffStatus(staffid_one).getStateString();
        assertEquals(true, "running".equals(state));
    }

    /**
     * setWorkerNametoPartitions(job, partition, name) should store
     * "name:port" in the agent's partition map (verified via reflection).
     */
    @Test
    public void testSetWorkerNametoPartitions() {
        WorkerAgentForJob wafj = new WorkerAgentForJob(
                TestUtil.createDonothingObject(WorkerSSControllerInterface.class));
        BSPJobID jobID = new BSPJobID();
        @SuppressWarnings("unused")
        StaffAttemptID staffID = new StaffAttemptID(jobID.getJtIdentifier(), jobID.getId(), 0, 0);
        Map<BSPJobID, WorkerAgentForJob> runningJobs =
                new ConcurrentHashMap<BSPJobID, WorkerAgentForJob>();
        runningJobs.put(jobID, wafj);
        HashMap<Integer, String> partitionToWorkerManagerName = null;
        int port = 0;
        try {
            partitionToWorkerManagerName = (HashMap<Integer, String>) TestUtil.get(
                    wafj, "partitionToWorkerManagerName");
            TestUtil.set(wm, "runningJobtoWorkerAgent", runningJobs);
            port = (Integer) TestUtil.get(wafj, "portForJob");
        } catch (Exception e) {
            e.printStackTrace();
        }
        wm.setWorkerNametoPartitions(jobID, 5, "TEST");
        String check = partitionToWorkerManagerName.get(5);
        String shouldBe = "TEST:" + port;
        assertEquals(true, check.equals(shouldBe));
    }

    /** getHostName() falls back to the configured worker-agent host. */
    @Test
    public void testGetHostName() {
        String hostNameTest = wm.getHostName();
        String hostName = conf.get(Constants.BC_BSP_WORKERAGENT_HOST,
                Constants.DEFAULT_BC_BSP_WORKERAGENT_HOST);
        assertEquals(true, hostName.equals(hostNameTest));
    }

    /** clearFailedJobList() empties the failed-job list. */
    @Test
    public void testClearFailedJobList() {
        ArrayList<BSPJobID> failedJobList = new ArrayList<BSPJobID>();
        BSPJobID jobID_one = new BSPJobID();
        BSPJobID jobID_two = new BSPJobID();
        failedJobList.add(jobID_one);
        failedJobList.add(jobID_two);
        wm.setFailedJobList(failedJobList);
        wm.clearFailedJobList();
        assertEquals(0, wm.getFailedJobList().size());
    }

    /** addFailedJob() appends to the failed-job list. */
    @Test
    public void testAddFailedJob() {
        ArrayList<BSPJobID> failedJobList = new ArrayList<BSPJobID>();
        BSPJobID jobID_one = new BSPJobID();
        failedJobList.add(jobID_one);
        wm.setFailedJobList(failedJobList);
        assertEquals(1, wm.getFailedJobList().size());
        BSPJobID jobID_two = new BSPJobID();
        wm.addFailedJob(jobID_two);
        assertEquals(2, wm.getFailedJobList().size());
    }

    /** getFailedJobCounter() reflects the failed-job list size. */
    @Test
    public void testGetFailedJobCounter() {
        ArrayList<BSPJobID> failedJobList = new ArrayList<BSPJobID>();
        BSPJobID jobID_one = new BSPJobID();
        failedJobList.add(jobID_one);
        wm.setFailedJobList(failedJobList);
        assertEquals(1, wm.getFailedJobCounter());
    }

    /** getFreePort() starts allocating at 60001 — assumes the port is free. */
    @Test
    public void testGetFreePort() {
        int port = wm.getFreePort();
        assertEquals(60001, port);
    }

    // NOTE(review): method below is truncated at the end of this chunk;
    // its remainder lies beyond the visible source and is left verbatim.
    @Test
    public void testGetMigrateSuperStep() {
        Map<StaffAttemptID, WorkerManager.StaffInProgress> runningStaffs =
                new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>();
        String staffId_one = "attempt_201207241653_0001_000001_0";
        StaffAttemptID staffid_one = 
new StaffAttemptID().forName(staffId_one); WorkerManager.StaffInProgress sip = wm.new StaffInProgress(); sip.setMigrateSS(2); runningStaffs.put(staffid_one, sip); wm.setRunningStaffs(runningStaffs); int migrateSuperStep = wm.getMigrateSuperStep(staffid_one); assertEquals(2, migrateSuperStep); } @Test public void testClearStaffRC() { WorkerAgentForJob wafj = new WorkerAgentForJob( TestUtil.createDonothingObject(WorkerSSControllerInterface.class)); wafj.setStaffReportCounter(2); BSPJobID jobID = new BSPJobID(); Map<BSPJobID, WorkerAgentForJob> runningJobs = new ConcurrentHashMap<BSPJobID, WorkerAgentForJob>(); runningJobs.put(jobID, wafj); wm.setRunningJobtoWorkerAgent(runningJobs); wm.clearStaffRC(jobID); assertEquals(0, (int) wafj.getStaffReportCounter()); } @Test public void testLocalBarrier() { Map<StaffAttemptID, WorkerManager.StaffInProgress> runningStaffs = new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>(); BSPJobID jobID = new BSPJobID(); StaffAttemptID staffID = new StaffAttemptID(jobID.getJtIdentifier(), jobID.getId(), 0, 0); WorkerManager.StaffInProgress sip = wm.new StaffInProgress(); SuperStepReportContainer ssrc = new SuperStepReportContainer(); runningStaffs.put(staffID, sip); wm.setRunningStaffs(runningStaffs); WorkerAgentForJob wafj = new WorkerAgentForJob( TestUtil.createDonothingObject(WorkerSSControllerInterface.class)); wafj.setStaffReportCounter(2); Map<BSPJobID, WorkerAgentForJob> runningJobs = new ConcurrentHashMap<BSPJobID, WorkerAgentForJob>(); runningJobs.put(jobID, wafj); wm.setRunningJobtoWorkerAgent(runningJobs); wm.localBarrier(jobID, staffID, 0, ssrc); assertEquals(1, runningStaffs.get(staffID).getStatus().getProgress()); assertEquals(false, wm.localBarrier(jobID, staffID, 0, ssrc)); } @Test public void testUpdateWorkerJobState() { WorkerAgentForJob wafj = new WorkerAgentForJob( TestUtil.createDonothingObject(WorkerSSControllerInterface.class)); BSPJobID jobID = new BSPJobID(); StaffAttemptID staffID = new 
StaffAttemptID(jobID.getJtIdentifier(), jobID.getId(), 0, 0); Map<BSPJobID, WorkerAgentForJob> runningJobs = new ConcurrentHashMap<BSPJobID, WorkerAgentForJob>(); runningJobs.put(jobID, wafj); wm.setRunningJobtoWorkerAgent(runningJobs); assertEquals(false, wm.updateWorkerJobState(staffID)); } @Test public void testInitFileSystem() throws Exception { ControllerProtocolServer cpserver= new ControllerProtocolServer(); InetSocketAddress bspControllerAddr = new InetSocketAddress( "Master.Hadoop", 65001); ControllerProtocol controllerClient = (ControllerProtocol) RPC .waitForProxy(ControllerProtocol.class, 0L, bspControllerAddr, conf); controllerClient = mock(ControllerProtocol.class); when(controllerClient.getSystemDir()).thenReturn("/home/user"); wm.setControllerClient(controllerClient); wm.initFileSystem(); assertEquals(false, wm.isJustInited()); cpserver.stop(); } @Test public void testSetStaffAgentAddress() { Map<StaffAttemptID, WorkerManager.StaffInProgress> runningStaffs = new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>(); String staffId_one = "attempt_201207241653_0001_000001_0"; StaffAttemptID staffid_one = new StaffAttemptID().forName(staffId_one); WorkerManager.StaffInProgress sip = wm.new StaffInProgress(); BSPJobID jobID = new BSPJobID(); runningStaffs.put(staffid_one, sip); wm.setRunningStaffs(runningStaffs); wm.setStaffAgentAddress(staffid_one, "Master.Hadoop:65002"); } //@Test // public void testOfferService() throws Exception { // BSPZookeeper bspzk=wm.getBspzk(); // if (bspzk.equaltostat(Constants.BSPCONTROLLER_LEADER, true)) { // bspzk.create(Constants.BSPCONTROLLER_LEADER, // new byte[0] , BSPIds.OPEN_ACL_UNSAFE, // new BSPCreateModeImpl().getEPHEMERAL()); // // this.becomeActive(); //// LOG.info("acitve address is " + bspControllerAddr); // } // if (bspzk.equaltostat(Constants.BSPCONTROLLER_STANDBY_LEADER, // false)) { // bspzk.create(Constants.BSPCONTROLLER_STANDBY_LEADER, // new byte[0], BSPIds.OPEN_ACL_UNSAFE, // new 
BSPCreateModeImpl().getPERSISTENT()); // } else { // bspzk.setData(Constants.BSPCONTROLLER_STANDBY_LEADER, // new byte[0], // bspzk.exists(Constants.BSPCONTROLLER_STANDBY_LEADER, false) // .getVersion()); // } // //// if(bspzk.exists("/leader", false)!=null){ //// bspzk.create("/leader", new byte[0] , BSPIds.OPEN_ACL_UNSAFE, //// new BSPCreateModeImpl().getPERSISTENT()); //// } //// // // wm.choseActiveControllerAddress(); //// InetSocketAddress bspControllerAddr = new InetSocketAddress( //// "Master.Hadoop", 65003); //// wm.setBspControllerAddr(bspControllerAddr); //// wm.setStandbyControllerAddr(bspControllerAddr); // // String standControllerAddr = wm.getData( // Constants.BSPCONTROLLER_STANDBY_LEADER); // // InetSocketAddress standbyControllerAddr = NetUtils // .createSocketAddr(standControllerAddr); // System.out.print(standbyControllerAddr.toString()); // wm.setStandbyControllerAddr(standbyControllerAddr); // // // //// String controllerAddr = wm.getData(Constants.BSPCONTROLLER_LEADER); //// InetSocketAddress bspControllerAddr = NetUtils.createSocketAddr(controllerAddr); // wm.setBspControllerAddr(standbyControllerAddr); // // WorkerManagerStatus workerMangerStatus = new WorkerManagerStatus(); // wm.setWorkerMangerStatus(workerMangerStatus); // // List<StaffStatus> reportStaffStatusList = new ArrayList<StaffStatus>(); // Fault fault = new Fault(); // wm.setReportStaffStatusList(reportStaffStatusList); // // Map<StaffAttemptID, WorkerManager.StaffInProgress> runningStaffs = new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>(); // String staffId_one = "attempt_201207241653_0001_000001_0"; // StaffAttemptID staffid_one = new StaffAttemptID().forName(staffId_one); // WorkerManager.StaffInProgress sip = wm.new StaffInProgress(); // StaffStatus stfs = new StaffStatus(); // stfs.setRunState(StaffStatus.State.RUNNING); // sip.setStaffStatus(stfs); // // runningStaffs.put(staffid_one, sip); // wm.setRunningStaffs(runningStaffs); // // 
Map<StaffAttemptID, WorkerManager.StaffInProgress> reprotStaffsMap = new ConcurrentHashMap<StaffAttemptID, WorkerManager.StaffInProgress>(); // wm.setReprotStaffsMap(reprotStaffsMap); // // List<Fault> workerFaultList = new ArrayList<Fault>(); // wm.setWorkerFaultList(workerFaultList); // wm.offerService(); // // // } @Test public void testEnsureFreshControllerClient() throws IOException { ControllerProtocolServer cpserver=new ControllerProtocolServer(); InetSocketAddress bspControllerAddr = new InetSocketAddress( "Master.Hadoop", 65001); wm.setBspControllerAddr(bspControllerAddr); ControllerProtocol standbyControllerClient =(ControllerProtocol) RPC.waitForProxy( ControllerProtocol.class, ControllerProtocol.versionID, bspControllerAddr, conf); wm.setStandbyControllerClient(standbyControllerClient); InetSocketAddress standbyControllerAddr= new InetSocketAddress( "Master.Hadoop", 65001); wm.setStandbyControllerAddr(standbyControllerAddr); wm.ensureFreshControllerClient(); assertEquals(true,wm.getStandbyControllerAddr().toString().equals(bspControllerAddr.toString())); cpserver.stop(); } @Test public void testGetData() throws IOException, Exception, InterruptedException { BSPZookeeper bspzk=wm.getBspzk(); if (bspzk.equaltostat(Constants.BSPCONTROLLER_LEADER, true)) { bspzk.create(Constants.BSPCONTROLLER_LEADER, new byte[0] , BSPIds.OPEN_ACL_UNSAFE, new BSPCreateModeImpl().getEPHEMERAL()); } wm.setBspzk(bspzk); String controllerAddr = wm.getData(Constants.BSPCONTROLLER_LEADER); assertEquals("",controllerAddr); } @Test public void testChoseActiveControllerAddress() { wm.choseActiveControllerAddress(); } @Test public void testRun() { fail("Not yet implemented"); } @Test public void testClose() { fail("Not yet implemented"); } }
package org.broad.igv.prefs;

import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.embed.swing.JFXPanel;
import javafx.geometry.HPos;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.*;
import javafx.scene.layout.*;
import org.broad.igv.DirectoryManager;
import org.broad.igv.Globals;
import org.broad.igv.ui.IGV;
import org.broad.igv.ui.util.FileDialogUtils;
import org.broad.igv.ui.util.UIUtilities;

import javax.swing.*;
import java.awt.*;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.List;

/**
 * A preferences editor dialog: a Swing {@link JDialog} hosting a JavaFX scene via
 * {@link JFXPanel}. One tab is built per preference group loaded by
 * {@link PreferencesManager#loadPreferenceList()}; edits are accumulated per category
 * and flushed to {@link PreferencesManager#updateAll(Map)} only when "Save" is pressed.
 * <p>
 * Threading: Swing components are touched only on the EDT, JavaFX nodes only on the
 * FX application thread; the two are bridged with {@code SwingUtilities.invokeLater}
 * and {@code Platform.runLater}.
 */
public class PreferenceEditorFX {

    /**
     * Standalone entry point for manual testing of the dialog.
     */
    public static void main(String[] args) throws IOException {
        open(null);
    }

    /**
     * Loads the preference definitions and opens the modal preferences dialog.
     *
     * @param parent owning Swing frame, or {@code null} for a free-floating dialog
     * @throws IOException if the preference list cannot be read
     */
    public static void open(Frame parent) throws IOException {

        List<PreferencesManager.PreferenceGroup> preferenceGroups =
                PreferencesManager.loadPreferenceList();

        SwingUtilities.invokeLater(() -> {
            JDialog frame = new JDialog(parent, "Preferences", true);
            final JFXPanel fxPanel = new JFXPanel();

            // The FX scene graph must be built on the FX application thread.
            Platform.runLater(() -> initFX(frame, fxPanel, preferenceGroups));

            frame.add(fxPanel);
            frame.pack();
            frame.setSize(800, 600);
            frame.setLocationRelativeTo(parent);
            frame.setVisible(true);
            frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
        });
    }

    /**
     * Builds the full scene (tabs, per-preference editors, Save/Cancel bar) into
     * {@code fxPanel}. Must run on the FX application thread.
     *
     * @param parent           the hosting Swing dialog, hidden on Save/Cancel
     * @param fxPanel          the Swing/FX bridge panel to install the scene into
     * @param preferenceGroups the preference definitions, one tab per group
     */
    private static void initFX(final JDialog parent, JFXPanel fxPanel,
                               List<PreferencesManager.PreferenceGroup> preferenceGroups) {

        TabPane tabPane = new TabPane();
        BorderPane borderPane = new BorderPane();
        borderPane.setCenter(tabPane);
        Scene scene = new Scene(borderPane);

        // Pending edits keyed by category; only written through on "Save".
        Map<String, Map<String, String>> updatedPreferencesMap = new HashMap<>();

        for (PreferencesManager.PreferenceGroup entry : preferenceGroups) {

            if (entry.tabLabel.equals("Hidden")) continue;

            final IGVPreferences preferences = PreferencesManager.getPreferences(entry.category);

            // Several groups may share a category; reuse its pending-edit map.
            final Map<String, String> updatedPrefs =
                    updatedPreferencesMap.containsKey(entry.category)
                            ? updatedPreferencesMap.get(entry.category)
                            : new HashMap<>();
            updatedPreferencesMap.put(entry.category, updatedPrefs);

            final String tabLabel = entry.tabLabel;
            Tab tab = new Tab(tabLabel);
            tab.setClosable(false);
            tabPane.getTabs().add(tab);

            ScrollPane scrollPane = new ScrollPane();
            tab.setContent(scrollPane);

            VBox vBox = new VBox();
            vBox.setFillWidth(true);
            scrollPane.setContent(vBox);

            // Ungrouped preferences land in this initial grid; each named group
            // below gets its own grid inside a TitledPane.
            GridPane gridPane = new GridPane();
            gridPane.setHgap(5);
            gridPane.setVgap(5);
            vBox.getChildren().add(gridPane);

            String currentGroup = null;
            int row = 1;

            for (PreferencesManager.Preference pref : entry.preferences) {
                try {
                    Tooltip tooltip = pref.getComment() == null ? null : new Tooltip(pref.getComment());

                    if (pref.getKey().equals(PreferencesManager.SEPARATOR_KEY)) {
                        Separator sep = new Separator();
                        GridPane.setColumnSpan(sep, 4);
                        gridPane.add(sep, 1, row);
                        row++;
                        continue;
                    }

                    if (pref.getKey().equals(PreferencesManager.INFO_KEY)) {
                        row++;
                        Label label = new Label(pref.getLabel());
                        // Both rules must go in ONE call: setStyle() replaces the
                        // node's style string, it does not accumulate.
                        label.setStyle("-fx-font-size:16; -fx-font-weight: bold");
                        GridPane.setColumnSpan(label, 4);
                        gridPane.add(label, 1, row);
                        row += 2;
                        continue;
                    }

                    if (pref.group != null && !pref.group.equals(currentGroup)) {
                        // Start a new titled group. The TitledPane owns the new grid,
                        // so the grid is deliberately NOT also added to the VBox
                        // (a Node may only have one parent).
                        row = 0;
                        currentGroup = pref.group;
                        gridPane = new GridPane();
                        gridPane.setHgap(5);
                        gridPane.setVgap(5);
                        TitledPane tp = new TitledPane(currentGroup, gridPane);
                        tp.setCollapsible(false);
                        vBox.getChildren().add(tp);
                        VBox.setMargin(tp, new Insets(10, 0, 0, 0));
                    }

                    if (pref.getType().equals("boolean")) {
                        CheckBox cb = new CheckBox(pref.getLabel());
                        cb.setSelected(preferences.getAsBoolean(pref.getKey()));
                        cb.setOnAction(event -> {
                            updatedPrefs.put(pref.getKey(), Boolean.toString(cb.isSelected()));
                            System.out.println("Set " + pref.getLabel() + ": " + cb.isSelected());
                        });
                        GridPane.setColumnSpan(cb, 2);
                        gridPane.add(cb, 1, row);
                        if (tooltip != null) {
                            cb.setTooltip(tooltip);
                        }

                    } else if (pref.getType().startsWith("select")) {
                        Label label = new Label(pref.getLabel());

                        // Type string is "select a|b|c" -- the token after the space
                        // holds the '|'-separated choices.
                        String[] selections =
                                Globals.whitespacePattern.split(pref.getType())[1].split("\\|");
                        final ComboBox<String> comboBox =
                                new ComboBox<>(FXCollections.observableArrayList(Arrays.asList(selections)));

                        // Show the user's CURRENT setting (consistent with the text
                        // fields below), not the hard-coded default.
                        comboBox.setValue(preferences.get(pref.getKey()));

                        comboBox.valueProperty().addListener((ov, oldValue, newValue) -> {
                            // Record the selection so "Save" actually persists it.
                            updatedPrefs.put(pref.getKey(), newValue);
                            System.out.println("Set " + pref.getLabel() + " " + newValue);
                        });

                        gridPane.add(label, 1, row);
                        GridPane.setColumnSpan(comboBox, 3);
                        gridPane.add(comboBox, 2, row);
                        if (tooltip != null) {
                            label.setTooltip(tooltip);
                            comboBox.setTooltip(tooltip);
                        }

                    } else {
                        // Free-form text preference; validated against the declared
                        // type both on commit (Enter) and on focus loss.
                        Label label = new Label(pref.getLabel());
                        TextField field = new TextField(preferences.get(pref.getKey()));
                        field.setPrefWidth(500);

                        field.setOnAction(event -> {
                            final String text = field.getText();
                            if (validate(text, pref.getType())) {
                                updatedPrefs.put(pref.getKey(), text);
                            } else {
                                // Reject: restore the stored value.
                                field.setText(preferences.get(pref.getKey()));
                            }
                        });

                        field.focusedProperty().addListener((observable, oldValue, newValue) -> {
                            if (!newValue) {   // focus lost
                                final String text = field.getText();
                                if (validate(text, pref.getType())) {
                                    updatedPrefs.put(pref.getKey(), text);
                                } else {
                                    field.setText(preferences.get(pref.getKey()));
                                }
                            }
                        });

                        gridPane.add(label, 1, row);
                        gridPane.add(field, 2, row);
                        if (tooltip != null) {
                            label.setTooltip(tooltip);
                            field.setTooltip(tooltip);
                        }
                    }

                    row++;

                } catch (Exception e) {
                    // One malformed preference must not abort the whole tab.
                    e.printStackTrace();
                }
            }

            if (tabLabel.equalsIgnoreCase("Advanced")) {
                // Special case: IGV directory relocation, appended to the last grid.
                String currentDirectory = DirectoryManager.getIgvDirectory().getAbsolutePath();
                final Label currentDirectoryLabel = new Label("IGV Directory: " + currentDirectory);
                final Button moveButton = new Button("Move...");
                row++;
                gridPane.add(currentDirectoryLabel, 1, row);
                GridPane.setHalignment(moveButton, HPos.RIGHT);
                gridPane.add(moveButton, 2, row);

                moveButton.setOnAction(event -> {
                    // File chooser runs on the Swing thread until ported to the FX dialog.
                    UIUtilities.invokeOnEventThread(() -> {
                        final File igvDirectory = DirectoryManager.getIgvDirectory();
                        final File newDirectory = FileDialogUtils.chooseDirectory(
                                "Select IGV directory", DirectoryManager.getUserDirectory());
                        if (newDirectory != null && !newDirectory.equals(igvDirectory)) {
                            DirectoryManager.moveIGVDirectory(newDirectory);
                            // Label update must hop back to the FX thread.
                            Platform.runLater(() ->
                                    currentDirectoryLabel.setText(newDirectory.getAbsolutePath()));
                        }
                    });
                });
            }
        }

        // Bottom button bar: Cancel discards pending edits, Save flushes them.
        HBox hbox = new HBox();
        hbox.setAlignment(Pos.CENTER_RIGHT);
        hbox.setPadding(new Insets(15, 12, 15, 12));
        hbox.setSpacing(5);
        hbox.setStyle("-fx-background-color: #336699;");

        Button cancelButton = new Button("Cancel");
        cancelButton.setPrefSize(100, 20);
        cancelButton.setOnAction(event ->
                SwingUtilities.invokeLater(() -> parent.setVisible(false)));

        Button saveButton = new Button("Save");
        saveButton.setPrefSize(100, 20);
        saveButton.setDefaultButton(true);
        saveButton.setOnAction(event -> {
            PreferencesManager.updateAll(updatedPreferencesMap);
            SwingUtilities.invokeLater(() -> parent.setVisible(false));
            if (IGV.hasInstance()) {
                IGV.getInstance().doRefresh();
            }
        });

        hbox.getChildren().addAll(cancelButton, saveButton);
        borderPane.setBottom(hbox);

        fxPanel.setScene(scene);
    }

    /**
     * Checks that {@code text} parses under the declared preference type.
     *
     * @param text the candidate value
     * @param type the preference type ("integer", "float", or anything else,
     *             which is accepted unvalidated)
     * @return {@code true} if the value is acceptable for the type
     */
    private static boolean validate(String text, String type) {
        if (type.equals("integer")) {
            try {
                Integer.parseInt(text);
            } catch (NumberFormatException e) {
                return false;
            }
        } else if (type.equals("float")) {
            try {
                Double.parseDouble(text);
            } catch (NumberFormatException e) {
                return false;
            }
        }
        return true;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.end2end; import org.apache.phoenix.schema.TypeMismatchException; import org.junit.Test; import java.sql.*; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public class ArrayConcatFunctionIT extends ParallelStatsDisabledIT { private String initTables(Connection conn) throws Exception { String tableName = generateUniqueName(); String ddl = "CREATE TABLE " + tableName + " (region_name VARCHAR PRIMARY KEY,varchars VARCHAR[],integers INTEGER[],doubles DOUBLE[],bigints BIGINT[],chars CHAR(15)[],double1 DOUBLE,char1 CHAR(17),nullcheck INTEGER,chars2 CHAR(15)[])"; conn.createStatement().execute(ddl); String dml = "UPSERT INTO " + tableName + "(region_name,varchars,integers,doubles,bigints,chars,double1,char1,nullcheck,chars2) VALUES('SF Bay Area'," + "ARRAY['2345','46345','23234']," + "ARRAY[2345,46345,23234,456]," + "ARRAY[23.45,46.345,23.234,45.6,5.78]," + "ARRAY[12,34,56,78,910]," + "ARRAY['a','bbbb','c','ddd','e']," + "23.45," + "'wert'," + "NULL," + "ARRAY['a','bbbb','c','ddd','e','foo']" + ")"; PreparedStatement stmt = conn.prepareStatement(dml); stmt.execute(); conn.commit(); return 
tableName; } @Test public void testArrayConcatFunctionVarchar() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(varchars,varchars) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); String[] strings = new String[]{"2345", "46345", "23234", "2345", "46345", "23234"}; Array array = conn.createArrayOf("VARCHAR", strings); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionInteger() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(integers,integers) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); Integer[] integers = new Integer[]{2345, 46345, 23234, 456, 2345, 46345, 23234, 456}; Array array = conn.createArrayOf("INTEGER", integers); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionDouble() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(doubles,doubles) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); Double[] doubles = new Double[]{23.45, 46.345, 23.234, 45.6, 5.78, 23.45, 46.345, 23.234, 45.6, 5.78}; Array array = conn.createArrayOf("DOUBLE", doubles); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionDouble2() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(doubles,ARRAY[23]) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); Double[] 
doubles = new Double[]{23.45, 46.345, 23.234, 45.6, 5.78, new Double(23)}; Array array = conn.createArrayOf("DOUBLE", doubles); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionBigint() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(bigints,bigints) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); Long[] longs = new Long[]{12l, 34l, 56l, 78l, 910l, 12l, 34l, 56l, 78l, 910l}; Array array = conn.createArrayOf("BIGINT", longs); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionChar() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(chars,chars) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); String[] strings = new String[]{"a", "bbbb", "c", "ddd", "e", "a", "bbbb", "c", "ddd", "e"}; Array array = conn.createArrayOf("CHAR", strings); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionChar3() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(chars,chars2) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); String[] strings = new String[]{"a", "bbbb", "c", "ddd", "e", "a", "bbbb", "c", "ddd", "e", "foo"}; Array array = conn.createArrayOf("CHAR", strings); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test(expected = TypeMismatchException.class) public void testArrayConcatFunctionIntToCharArray() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = 
initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(varchars,ARRAY[23,45]) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); } @Test(expected = TypeMismatchException.class) public void testArrayConcatFunctionVarcharToIntegerArray() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(integers,ARRAY['a', 'b']) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); } @Test(expected = SQLException.class) public void testArrayConcatFunctionChar2() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(chars,ARRAY['facfacfacfacfacfacfac','facfacfacfacfacfacfac']) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); rs.next(); rs.getArray(1); } @Test public void testArrayConcatFunctionIntegerArrayToDoubleArray() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(doubles,ARRAY[45, 55]) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); Double[] doubles = new Double[]{23.45, 46.345, 23.234, 45.6, 5.78, 45.0, 55.0}; Array array = conn.createArrayOf("DOUBLE", doubles); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithNestedFunctions1() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(ARRAY[23,45],ARRAY[integers[1],integers[1]]) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); Integer[] integers = new Integer[]{23, 45, 2345, 2345}; Array array = 
conn.createArrayOf("INTEGER", integers); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithNestedFunctions2() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(integers,ARRAY[ARRAY_ELEM(ARRAY[2,4],1),ARRAY_ELEM(ARRAY[2,4],2)]) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); Integer[] integers = new Integer[]{2345, 46345, 23234, 456, 2, 4}; Array array = conn.createArrayOf("INTEGER", integers); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithNestedFunctions3() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT ARRAY_CAT(doubles,ARRAY[ARRAY_ELEM(doubles, 1), ARRAY_ELEM(doubles, 1)]) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); Double[] doubles = new Double[]{23.45, 46.345, 23.234, 45.6, 5.78, 23.45, 23.45}; Array array = conn.createArrayOf("DOUBLE", doubles); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithUpsert1() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = generateUniqueName(); String ddl = "CREATE TABLE " + tableName + " (region_name VARCHAR PRIMARY KEY,varchars VARCHAR[])"; conn.createStatement().execute(ddl); String dml = "UPSERT INTO " + tableName + "(region_name,varchars) VALUES('SF Bay Area',ARRAY_CAT(ARRAY['hello','world'],ARRAY[':-)']))"; conn.createStatement().execute(dml); conn.commit(); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT varchars FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); String[] strings = new String[]{"hello", "world", ":-)"}; Array 
array = conn.createArrayOf("VARCHAR", strings); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithUpsert2() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = generateUniqueName(); String ddl = "CREATE TABLE " + tableName + " (region_name VARCHAR PRIMARY KEY,integers INTEGER[])"; conn.createStatement().execute(ddl); String dml = "UPSERT INTO " + tableName + "(region_name,integers) VALUES('SF Bay Area',ARRAY_CAT(ARRAY[4,5],ARRAY[6, 7]))"; conn.createStatement().execute(dml); conn.commit(); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT integers FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); Integer[] integers = new Integer[]{4, 5, 6, 7}; Array array = conn.createArrayOf("INTEGER", integers); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithUpsert3() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = generateUniqueName(); String ddl = "CREATE TABLE " + tableName + " (region_name VARCHAR PRIMARY KEY,doubles DOUBLE[])"; conn.createStatement().execute(ddl); String dml = "UPSERT INTO " + tableName + "(region_name,doubles) VALUES('SF Bay Area',ARRAY_CAT(ARRAY[5.67,7.87],ARRAY[9.0, 8.0]))"; conn.createStatement().execute(dml); conn.commit(); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT doubles FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); assertTrue(rs.next()); Double[] doubles = new Double[]{5.67, 7.87, new Double(9), new Double(8)}; Array array = conn.createArrayOf("DOUBLE", doubles); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithUpsertSelect1() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String sourceTableName = generateUniqueName(); String targetTableName = generateUniqueName(); String ddl = 
"CREATE TABLE " + sourceTableName + " (region_name VARCHAR PRIMARY KEY,doubles DOUBLE[])"; conn.createStatement().execute(ddl); ddl = "CREATE TABLE " + targetTableName + " (region_name VARCHAR PRIMARY KEY,doubles DOUBLE[])"; conn.createStatement().execute(ddl); String dml = "UPSERT INTO " + sourceTableName + "(region_name,doubles) VALUES('SF Bay Area',ARRAY_CAT(ARRAY[5.67,7.87],ARRAY[9.0, 4.0]))"; conn.createStatement().execute(dml); dml = "UPSERT INTO " + sourceTableName + "(region_name,doubles) VALUES('SF Bay Area2',ARRAY_CAT(ARRAY[56.7,7.87],ARRAY[9.2, 3.4]))"; conn.createStatement().execute(dml); conn.commit(); dml = "UPSERT INTO " + targetTableName + "(region_name, doubles) SELECT region_name, ARRAY_CAT(doubles,doubles) FROM " + sourceTableName ; conn.createStatement().execute(dml); conn.commit(); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT doubles FROM " + targetTableName ); assertTrue(rs.next()); Double[] doubles = new Double[]{5.67, 7.87, new Double(9), new Double(4), 5.67, 7.87, new Double(9), new Double(4)}; Array array = conn.createArrayOf("DOUBLE", doubles); assertEquals(array, rs.getArray(1)); assertTrue(rs.next()); doubles = new Double[]{56.7, 7.87, new Double(9.2), new Double(3.4), 56.7, 7.87, new Double(9.2), new Double(3.4)}; array = conn.createArrayOf("DOUBLE", doubles); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithUpsertSelect2() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String sourceTableName = generateUniqueName(); String targetTableName = generateUniqueName(); String ddl = "CREATE TABLE " + sourceTableName + " (region_name VARCHAR PRIMARY KEY,varchars VARCHAR[])"; conn.createStatement().execute(ddl); ddl = "CREATE TABLE " + targetTableName + " (region_name VARCHAR PRIMARY KEY,varchars VARCHAR[])"; conn.createStatement().execute(ddl); String dml = "UPSERT INTO " + sourceTableName + "(region_name,varchars) VALUES('SF Bay 
Area',ARRAY_CAT(ARRAY['abcd','b'],ARRAY['c', 'd']))"; conn.createStatement().execute(dml); dml = "UPSERT INTO " + sourceTableName + "(region_name,varchars) VALUES('SF Bay Area2',ARRAY_CAT(ARRAY['d','fgh'],ARRAY['something','something']))"; conn.createStatement().execute(dml); conn.commit(); dml = "UPSERT INTO " + targetTableName + "(region_name, varchars) SELECT region_name, ARRAY_CAT(varchars,varchars) FROM " + sourceTableName ; conn.createStatement().execute(dml); conn.commit(); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT varchars FROM " + targetTableName ); assertTrue(rs.next()); String[] strings = new String[]{"abcd", "b", "c", "d", "abcd", "b", "c", "d"}; Array array = conn.createArrayOf("VARCHAR", strings); assertEquals(array, rs.getArray(1)); assertTrue(rs.next()); strings = new String[]{"d", "fgh", "something", "something", "d", "fgh", "something", "something"}; array = conn.createArrayOf("VARCHAR", strings); assertEquals(array, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionInWhere1() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT region_name FROM " + tableName + " WHERE ARRAY[2345,46345,23234,456,123]=ARRAY_CAT(integers,ARRAY[123])"); assertTrue(rs.next()); assertEquals("SF Bay Area", rs.getString(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionInWhere2() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT region_name FROM " + tableName + " WHERE varchars[1]=ANY(ARRAY_CAT(ARRAY['2345','46345','23234'],ARRAY['1234']))"); assertTrue(rs.next()); assertEquals("SF Bay Area", rs.getString(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionInWhere3() throws Exception { Connection conn = 
DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT region_name FROM " + tableName + " WHERE ARRAY['2345','46345','23234','1234','234']=ARRAY_CAT(ARRAY['2345','46345','23234'],ARRAY['1234', '234'])"); assertTrue(rs.next()); assertEquals("SF Bay Area", rs.getString(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionInWhere4() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT region_name FROM " + tableName + " WHERE ARRAY[23.45,4634.5,2.3234,123.4,12.0]=ARRAY_CAT(ARRAY[23.45,4634.5,2.3234],ARRAY[123.4,12.0])"); assertTrue(rs.next()); assertEquals("SF Bay Area", rs.getString(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionInWhere5() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT region_name FROM " + tableName + " WHERE ARRAY['2345','46345','23234','foo','foo']=ARRAY_CAT(varchars,ARRAY['foo','foo'])"); assertTrue(rs.next()); assertEquals("SF Bay Area", rs.getString(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionInWhere6() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT region_name FROM " + tableName + " WHERE chars2=ARRAY_CAT(chars,ARRAY['foo'])"); assertTrue(rs.next()); assertEquals("SF Bay Area", rs.getString(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionInWhere7() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; rs = conn.createStatement().executeQuery("SELECT region_name FROM " + tableName + " WHERE 
ARRAY[2,3,4,5]=ARRAY_CAT(ARRAY[2,3],ARRAY[4,5])"); assertTrue(rs.next()); assertEquals("SF Bay Area", rs.getString(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithNulls1() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; PreparedStatement st = conn.prepareStatement("SELECT ARRAY_CAT(?,?) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); Array array1 = conn.createArrayOf("VARCHAR", new Object[]{"a", "b", "c", null}); st.setArray(1, array1); Array array2 = conn.createArrayOf("VARCHAR", new Object[]{"a", "b", "c"}); st.setArray(2, array2); rs = st.executeQuery(); assertTrue(rs.next()); Array expected = conn.createArrayOf("VARCHAR", new Object[]{"a", "b", "c", null, "a", "b", "c"}); assertEquals(expected, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithNulls2() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; PreparedStatement st = conn.prepareStatement("SELECT ARRAY_CAT(?,?) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); Array array1 = conn.createArrayOf("VARCHAR", new Object[]{"a", "b", "c"}); st.setArray(1, array1); Array array2 = conn.createArrayOf("VARCHAR", new Object[]{null, "a", "b", "c"}); st.setArray(2, array2); rs = st.executeQuery(); assertTrue(rs.next()); Array expected = conn.createArrayOf("VARCHAR", new Object[]{"a", "b", "c", null, "a", "b", "c"}); assertEquals(expected, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithNulls3() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; PreparedStatement st = conn.prepareStatement("SELECT ARRAY_CAT(?,?) 
FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); Array array1 = conn.createArrayOf("VARCHAR", new Object[]{"a", "b", "c", null}); st.setArray(1, array1); Array array2 = conn.createArrayOf("VARCHAR", new Object[]{null, "a", "b", "c"}); st.setArray(2, array2); rs = st.executeQuery(); assertTrue(rs.next()); Array expected = conn.createArrayOf("VARCHAR", new Object[]{"a", "b", "c", null, null, "a", "b", "c"}); assertEquals(expected, rs.getArray(1)); assertFalse(rs.next()); } @Test public void testArrayConcatFunctionWithNulls4() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String tableName = initTables(conn); ResultSet rs; PreparedStatement st = conn.prepareStatement("SELECT ARRAY_CAT(?,?) FROM " + tableName + " WHERE region_name = 'SF Bay Area'"); Array array1 = conn.createArrayOf("VARCHAR", new Object[]{null, "a", null, "b", "c", null, null}); st.setArray(1, array1); Array array2 = conn.createArrayOf("VARCHAR", new Object[]{null, null, "a", null, "b", null, "c", null}); st.setArray(2, array2); rs = st.executeQuery(); assertTrue(rs.next()); Array expected = conn.createArrayOf("VARCHAR", new Object[]{null, "a", null, "b", "c", null, null, null, null, "a", null, "b", null, "c", null}); assertEquals(expected, rs.getArray(1)); assertFalse(rs.next()); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.adapter.jdbc;

import org.apache.calcite.DataContext;
import org.apache.calcite.adapter.enumerable.EnumerableRel;
import org.apache.calcite.adapter.enumerable.EnumerableRelImplementor;
import org.apache.calcite.adapter.enumerable.JavaRowFormat;
import org.apache.calcite.adapter.enumerable.PhysType;
import org.apache.calcite.adapter.enumerable.PhysTypeImpl;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.linq4j.tree.BlockBuilder;
import org.apache.calcite.linq4j.tree.ConstantExpression;
import org.apache.calcite.linq4j.tree.Expression;
import org.apache.calcite.linq4j.tree.Expressions;
import org.apache.calcite.linq4j.tree.ParameterExpression;
import org.apache.calcite.linq4j.tree.Primitive;
import org.apache.calcite.linq4j.tree.UnaryExpression;
import org.apache.calcite.plan.ConventionTraitDef;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.prepare.CalcitePrepareImpl;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.convert.ConverterImpl;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.runtime.Hook;
import org.apache.calcite.runtime.SqlFunctions;
import org.apache.calcite.schema.Schemas;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.util.SqlString;
import org.apache.calcite.util.BuiltInMethod;

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
import java.util.TimeZone;
import java.util.stream.Collectors;

/**
 * Relational expression representing a scan of a table in a JDBC data source.
 *
 * <p>Bridges the JDBC convention and the enumerable convention: its child is a
 * {@code JdbcRel} subtree that is rendered to a SQL string, and {@link #implement}
 * emits linq4j code that executes that SQL and converts each {@link ResultSet}
 * row into the enumerable row format.
 */
public class JdbcToEnumerableConverter
    extends ConverterImpl
    implements EnumerableRel {
  protected JdbcToEnumerableConverter(
      RelOptCluster cluster,
      RelTraitSet traits,
      RelNode input) {
    super(cluster, ConventionTraitDef.INSTANCE, traits, input);
  }

  /** Copies this converter with new traits and input; required by the planner. */
  @Override public RelNode copy(RelTraitSet traitSet, List<RelNode> inputs) {
    return new JdbcToEnumerableConverter(
        getCluster(), traitSet, sole(inputs));
  }

  // Cost is the child's cost scaled by 0.1 — presumably to bias the planner
  // toward pushing work into the JDBC source; confirm against planner docs.
  @Override public RelOptCost computeSelfCost(RelOptPlanner planner,
      RelMetadataQuery mq) {
    return super.computeSelfCost(planner, mq).multiplyBy(.1);
  }

  /**
   * Generates the enumerable implementation: renders the JDBC subtree to SQL,
   * then emits code of the shape
   * {@code ResultSetEnumerable.of(dataSource, sql, rowBuilderFactory)}
   * (or the prepared-statement variant when the SQL has dynamic parameters).
   */
  public Result implement(EnumerableRelImplementor implementor, Prefer pref) {
    // Generate:
    //   ResultSetEnumerable.of(schema.getDataSource(), "select ...")
    final BlockBuilder builder0 = new BlockBuilder(false);
    final JdbcRel child = (JdbcRel) getInput();
    final PhysType physType =
        PhysTypeImpl.of(
            implementor.getTypeFactory(), getRowType(),
            pref.prefer(JavaRowFormat.CUSTOM));
    final JdbcConvention jdbcConvention =
        (JdbcConvention) child.getConvention();
    SqlString sqlString = generateSql(jdbcConvention.dialect);
    String sql = sqlString.getSql();
    if (CalcitePrepareImpl.DEBUG) {
      System.out.println("[" + sql + "]");
    }
    Hook.QUERY_PLAN.run(sql);
    final Expression sql_ =
        builder0.append("sql", Expressions.constant(sql));
    final int fieldCount = getRowType().getFieldCount();

    // "builder" accumulates the body of the per-row lambda that reads one
    // ResultSet row; "builder0" accumulates the outer enumerable setup.
    BlockBuilder builder = new BlockBuilder();
    final ParameterExpression resultSet_ =
        Expressions.parameter(Modifier.FINAL, ResultSet.class,
            builder.newName("resultSet"));
    final SqlDialect.CalendarPolicy calendarPolicy =
        jdbcConvention.dialect.getCalendarPolicy();
    final Expression calendar_;
    switch (calendarPolicy) {
    case LOCAL:
      // Dialect wants date/time values read with an explicit Calendar in the
      // connection's time zone (taken from the root data context).
      calendar_ =
          builder0.append("calendar",
              Expressions.call(Calendar.class, "getInstance",
                  getTimeZoneExpression(implementor)));
      break;
    default:
      calendar_ = null;
    }
    if (fieldCount == 1) {
      // Single column: the row is the bare value, not an Object[].
      final ParameterExpression value_ =
          Expressions.parameter(Object.class, builder.newName("value"));
      builder.add(Expressions.declare(Modifier.FINAL, value_, null));
      generateGet(implementor, physType, builder, resultSet_, 0, value_,
          calendar_, calendarPolicy);
      builder.add(Expressions.return_(null, value_));
    } else {
      // Multiple columns: fill an Object[fieldCount] with one get per field.
      final Expression values_ =
          builder.append("values",
              Expressions.newArrayBounds(Object.class, 1,
                  Expressions.constant(fieldCount)));
      for (int i = 0; i < fieldCount; i++) {
        generateGet(implementor, physType, builder, resultSet_, i,
            Expressions.arrayIndex(values_, Expressions.constant(i)),
            calendar_, calendarPolicy);
      }
      builder.add(
          Expressions.return_(null, values_));
    }
    // Row-builder factory: resultSet -> (() -> row); SQLExceptions from the
    // getters are wrapped in RuntimeException.
    final ParameterExpression e_ =
        Expressions.parameter(SQLException.class, builder.newName("e"));
    final Expression rowBuilderFactory_ =
        builder0.append("rowBuilderFactory",
            Expressions.lambda(
                Expressions.block(
                    Expressions.return_(null,
                        Expressions.lambda(
                            Expressions.block(
                                Expressions.tryCatch(
                                    builder.toBlock(),
                                    Expressions.catch_(
                                        e_,
                                        Expressions.throw_(
                                            Expressions.new_(
                                                RuntimeException.class,
                                                e_)))))))),
                resultSet_));

    final Expression enumerable;

    if (sqlString.getDynamicParameters() != null
        && !sqlString.getDynamicParameters().isEmpty()) {
      // SQL contains '?' placeholders: emit the prepared-statement variant and
      // an enricher that binds values from the DataContext by parameter index.
      final Expression preparedStatementConsumer_ =
          builder0.append("preparedStatementConsumer",
              Expressions.call(BuiltInMethod.CREATE_ENRICHER.method,
                  Expressions.newArrayInit(Integer.class, 1,
                      toIndexesTableExpression(sqlString)),
                  DataContext.ROOT));

      enumerable = builder0.append("enumerable",
          Expressions.call(
              BuiltInMethod.RESULT_SET_ENUMERABLE_OF_PREPARED.method,
              Expressions.call(
                  Schemas.unwrap(jdbcConvention.expression, JdbcSchema.class),
                  BuiltInMethod.JDBC_SCHEMA_DATA_SOURCE.method),
              sql_,
              rowBuilderFactory_,
              preparedStatementConsumer_));
    } else {
      enumerable = builder0.append("enumerable",
          Expressions.call(
              BuiltInMethod.RESULT_SET_ENUMERABLE_OF.method,
              Expressions.call(
                  Schemas.unwrap(jdbcConvention.expression, JdbcSchema.class),
                  BuiltInMethod.JDBC_SCHEMA_DATA_SOURCE.method),
              sql_,
              rowBuilderFactory_));
    }
    builder0.add(
        Expressions.return_(null, enumerable));
    return implementor.result(physType, builder0.toBlock());
  }

  /** Converts the SQL's dynamic-parameter indexes into constant expressions. */
  private List<ConstantExpression> toIndexesTableExpression(SqlString sqlString) {
    return sqlString.getDynamicParameters().stream()
        .map(Expressions::constant)
        .collect(Collectors.toList());
  }

  /** Expression reading the "timeZone" entry of the root data context as a TimeZone. */
  private UnaryExpression getTimeZoneExpression(
      EnumerableRelImplementor implementor) {
    return Expressions.convert_(
        Expressions.call(
            implementor.getRootExpression(),
            "get",
            Expressions.constant("timeZone")),
        TimeZone.class);
  }

  /**
   * Emits the statement that reads field {@code i} (0-based) of the current
   * ResultSet row into {@code target}, applying the dialect's calendar policy
   * for date/time types and converting SQL arrays to lists.
   */
  private void generateGet(EnumerableRelImplementor implementor,
      PhysType physType, BlockBuilder builder, ParameterExpression resultSet_,
      int i, Expression target, Expression calendar_,
      SqlDialect.CalendarPolicy calendarPolicy) {
    final Primitive primitive = Primitive.ofBoxOr(physType.fieldClass(i));
    final RelDataType fieldType =
        physType.getRowType().getFieldList().get(i).getType();
    final List<Expression> dateTimeArgs = new ArrayList<>();
    // JDBC column indexes are 1-based.
    dateTimeArgs.add(Expressions.constant(i + 1));
    SqlTypeName sqlTypeName = fieldType.getSqlTypeName();
    boolean offset = false;
    switch (calendarPolicy) {
    case LOCAL:
      dateTimeArgs.add(calendar_);
      break;
    case NULL:
      // We don't specify a calendar at all, so we don't add an argument and
      // instead use the version of the getXXX that doesn't take a Calendar
      break;
    case DIRECT:
      // Read the raw object; no date/time conversion at all.
      sqlTypeName = SqlTypeName.ANY;
      break;
    case SHIFT:
      // Read with the driver's default calendar, then shift by the time zone.
      switch (sqlTypeName) {
      case TIMESTAMP:
      case DATE:
        offset = true;
      }
      break;
    }
    final Expression source;
    switch (sqlTypeName) {
    case DATE:
    case TIME:
    case TIMESTAMP:
      // Convert java.sql.Date/Time/Timestamp to Calcite's internal int/long
      // representation, optionally applying the time-zone offset.
      source = Expressions.call(
          getMethod(sqlTypeName, fieldType.isNullable(), offset),
          Expressions.<Expression>list()
              .append(
                  Expressions.call(resultSet_,
                      getMethod2(sqlTypeName), dateTimeArgs))
              .appendIf(offset, getTimeZoneExpression(implementor)));
      break;
    case ARRAY:
      final Expression x = Expressions.convert_(
          Expressions.call(resultSet_, jdbcGetMethod(primitive),
              Expressions.constant(i + 1)),
          java.sql.Array.class);
      source = Expressions.call(BuiltInMethod.JDBC_ARRAY_TO_LIST.method, x);
      break;
    default:
      source = Expressions.call(
          resultSet_, jdbcGetMethod(primitive), Expressions.constant(i + 1));
    }
    builder.add(
        Expressions.statement(
            Expressions.assign(
                target, source)));

    // [CALCITE-596] If primitive type columns contain null value, returns null
    // object
    if (primitive != null) {
      builder.add(
          Expressions.ifThen(
              Expressions.call(resultSet_, "wasNull"),
              Expressions.statement(
                  Expressions.assign(target,
                      Expressions.constant(null)))));
    }
  }

  /** Picks the date/time-to-internal conversion method for a temporal type. */
  private Method getMethod(SqlTypeName sqlTypeName, boolean nullable,
      boolean offset) {
    switch (sqlTypeName) {
    case DATE:
      return (nullable
          ? BuiltInMethod.DATE_TO_INT_OPTIONAL
          : BuiltInMethod.DATE_TO_INT).method;
    case TIME:
      return (nullable
          ? BuiltInMethod.TIME_TO_INT_OPTIONAL
          : BuiltInMethod.TIME_TO_INT).method;
    case TIMESTAMP:
      return (nullable
          ? (offset
          ? BuiltInMethod.TIMESTAMP_TO_LONG_OPTIONAL_OFFSET
          : BuiltInMethod.TIMESTAMP_TO_LONG_OPTIONAL)
          : (offset
              ? BuiltInMethod.TIMESTAMP_TO_LONG_OFFSET
              : BuiltInMethod.TIMESTAMP_TO_LONG)).method;
    default:
      throw new AssertionError(sqlTypeName + ":" + nullable);
    }
  }

  /** Picks the ResultSet getter (getDate/getTime/getTimestamp) for a temporal type. */
  private Method getMethod2(SqlTypeName sqlTypeName) {
    switch (sqlTypeName) {
    case DATE:
      return BuiltInMethod.RESULT_SET_GET_DATE2.method;
    case TIME:
      return BuiltInMethod.RESULT_SET_GET_TIME2.method;
    case TIMESTAMP:
      return BuiltInMethod.RESULT_SET_GET_TIMESTAMP2.method;
    default:
      throw new AssertionError(sqlTypeName);
    }
  }

  /** E.g. {@code jdbcGetMethod(int)} returns "getInt"; null primitive means "getObject". */
  private String jdbcGetMethod(Primitive primitive) {
    return primitive == null
        ? "getObject"
        : "get" + SqlFunctions.initcap(primitive.primitiveName);
  }

  /** Renders the child JDBC subtree to a SQL string in the given dialect. */
  private SqlString generateSql(SqlDialect dialect) {
    final JdbcImplementor jdbcImplementor =
        new JdbcImplementor(dialect,
            (JavaTypeFactory) getCluster().getTypeFactory());
    final JdbcImplementor.Result result =
        jdbcImplementor.visitChild(0, getInput());
    return result.asStatement().toSqlString(dialect);
  }
}

// End JdbcToEnumerableConverter.java
/*-------------------------------------------------------------------------+
|                                                                          |
| Copyright 2005-2011 The ConQAT Project                                   |
|                                                                          |
| Licensed under the Apache License, Version 2.0 (the "License");          |
| you may not use this file except in compliance with the License.         |
| You may obtain a copy of the License at                                  |
|                                                                          |
|    http://www.apache.org/licenses/LICENSE-2.0                            |
|                                                                          |
| Unless required by applicable law or agreed to in writing, software      |
| distributed under the License is distributed on an "AS IS" BASIS,        |
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| See the License for the specific language governing permissions and      |
| limitations under the License.                                           |
+-------------------------------------------------------------------------*/
package org.conqat.engine.commons;

import java.text.SimpleDateFormat;

import org.conqat.engine.core.core.IConQATProcessor;

/**
 * This is a class collecting the names and description strings of commonly used
 * parameters and their attributes.
 *
 * @author $Author: juergens $
 * @version $Rev: 42129 $
 * @ConQAT.Rating YELLOW Hash: A11D326F76B49D341B27DD51BAB4AE60
 */
public abstract class ConQATParamDoc implements IConQATProcessor {

    /** Name of the input parameter. */
    public static final String INPUT_NAME = "input";

    /** Description of the input parameter. */
    public static final String INPUT_DESC = "The input this processor works on.";

    /** Name of the ref attribute for the input parameter. */
    public static final String INPUT_REF_NAME = "ref";

    /** Description of the ref attribute for the input parameter. */
    public static final String INPUT_REF_DESC = "Reference to the generating processor.";

    /** Name of the enable parameter. */
    public static final String ENABLE_NAME = "enable";

    /** Description of the enable parameter. */
    public static final String ENABLE_DESC = "If set to true, processor is enabled. If disabled, no action is performed.";

    /** Name of the processor attribute for the enable parameter. */
    public static final String ENABLE_PROCESSOR_NAME = "processor";

    /** Name of the readkey parameter. */
    public static final String READKEY_NAME = "read";

    /** Description of the readkey parameter. */
    public static final String READKEY_DESC = "The key to read from.";

    /** Name of the key attribute for the readkey parameter. */
    public static final String READKEY_KEY_NAME = "key";

    /** Description of the key attribute for the readkey parameter. */
    public static final String READKEY_KEY_DESC = "The name of the key.";

    /** Name of the writekey parameter. */
    public static final String WRITEKEY_NAME = "write";

    /** Description of the writekey parameter. */
    public static final String WRITEKEY_DESC = "The key to write to.";

    /** Name of the aggregation strategy parameter. */
    public static final String AGG_STRATEGY_NAME = "aggregation";

    /** Description of the aggregation strategy parameter. */
    public static final String AGG_STRATEGY_DESC = "Define aggregation strategy.";

    /** Name of the strategy attribute. */
    public static final String STRATEGY_NAME = "strategy";

    /** Description of the strategy attribute. */
    public static final String STRATEGY_DESC = "Enum value for aggregation strategy.";

    /** Name of the key attribute for the writekey parameter. */
    public static final String WRITEKEY_KEY_NAME = READKEY_KEY_NAME;

    /** Description of the key attribute for the writekey parameter. */
    public static final String WRITEKEY_KEY_DESC = READKEY_KEY_DESC;

    /** Name of the auth parameter. */
    public static final String AUTH_NAME = "auth";

    /** Description of the auth parameter. (Typo "authentification" fixed.) */
    public static final String AUTH_DESC = "The authentication information.";

    /** Name of the user attribute for the auth parameter. */
    public static final String AUTH_USER_NAME = "user";

    /** Description of the user attribute for the auth parameter. */
    public static final String AUTH_USER_DESC = "The username";

    /** Name of the pass attribute for the auth parameter. */
    public static final String AUTH_PASS_NAME = "pass";

    /** Description of the pass attribute for the auth parameter. */
    public static final String AUTH_PASS_DESC = "The password.";

    /** Name of the include parameter. */
    public static final String INCLUDE_NAME = "include";

    /** Name of the exclude parameter. */
    public static final String EXCLUDE_NAME = "exclude";

    /** Name of an attribute describing an ant pattern. */
    public static final String ANT_PATTERN_NAME = "pattern";

    /** Description of an attribute describing an ant pattern. */
    public static final String ANT_PATTERN_DESC = "A pattern as defined by http://ant.apache.org/manual/dirtasks.html#patterns";

    /** Name of an attribute describing an HTML color. */
    public static final String HTML_COLOR_NAME = "color";

    /** Description of an attribute describing an HTML color. */
    public static final String HTML_COLOR_DESC = "The color using the #RRGGBB format known from HTML.";

    /** Description of an attribute accepting a Java RegEx pattern. */
    public static final String REGEX_PATTERN_DESC = "A regular expression as described in the Java API documentation at "
            + "http://java.sun.com/j2se/1.5.0/docs/api/index.html";

    /**
     * Description of an attribute accepting a {@link SimpleDateFormat} pattern.
     */
    public static final String DATE_PATTERN_DESC = "The date pattern as specified by http://java.sun.com/javase/6/docs/api/java/text/SimpleDateFormat.html";

    /** Name of a parameter to specify predecessors */
    public static final String PREDECESSOR_NAME = "predecessor";

    /** Description of a parameter to specify predecessors */
    public static final String PREDECESSOR_NAME_DESC = "Processor that should be executed before this processor gets executed";

    /** Name of an attribute referencing a predecessor */
    public static final String PREDECESSOR_REF_NAME = INPUT_REF_NAME;

    /** Description of an attribute referencing a predecessor */
    public static final String PREDECESSOR_REF_DESC = "Reference to the predecessor";

    /** Name of the encoding parameter. */
    public static final String ENCODING_PARAM_NAME = "encoding";

    /** Description of the encoding parameter. */
    public static final String ENCODING_PARAM_DESC = "Set encoding for files"
            + " in this scope [default encoding is used if not set].";

    /** Name of the encoding attribute. */
    public static final String ENCODING_ATTR_NAME = "name";

    /** Description of the encoding attribute. */
    public static final String ENCODING_ATTR_DESC = "Name of the encoding";

    /** String that describes the type of a finding list. */
    public static final String FINDING_LIST_TYPE = "java.util.List<org.conqat.engine.commons.findings.Finding>";

    /** Name of repetition min length parameter */
    public static final String REPETITION_MIN_LENGTH_NAME = "length";

    /** Description of repetition min length parameter */
    public static final String REPETITION_MIN_LENGTH_DESC = "Minimal number of statements contained in repetition. Must be > 0.";

    /** Name of repetition min instances parameter */
    public static final String REPETITION_MIN_INSTANCES_NAME = "instances";

    /** Description of repetition min instances parameter */
    public static final String REPETITION_MIN_INSTANCES_DESC = "Minimal required number of motif instances in repetition. Must be >= 2.";

    /** Name of repetition min motif length parameter */
    public static final String REPETITION_MIN_MOTIF_LENGTH_NAME = "min-motif-length";

    /** Description of repetition min motif length parameter */
    public static final String REPETITION_MIN_MOTIF_LENGTH_DESC = "Length of shortest repetition motif being searched for. Must be > 0.";

    /** Name of repetition max motif length parameter */
    public static final String REPETITION_MAX_MOTIF_LENGTH_NAME = "max-motif-length";

    /** Description of repetition max motif length parameter (typo "motig" fixed) */
    public static final String REPETITION_MAX_MOTIF_LENGTH_DESC = "Length of longest repetition motif being searched for. Must be >= min motif length.";

    /** Name of regex regions */
    public static final String REGEX_REGIONS_NAME = "mark";

    /** Description of regex regions name */
    public static final String REGEX_REGIONS_DESC = "Parameters for region recognition.";

    /** Name of regex regions patterns */
    public static final String REGEX_REGIONS_PATTERNS_NAME = "patterns";

    /** Description of regex regions patterns */
    public static final String REGEX_REGIONS_PATTERNS_DESC = "Reference to the pattern list used.";

    /** Name of regex regions origin */
    public static final String REGEX_REGIONS_ORIGIN_NAME = "origin";

    /** Description of regex regions origin */
    public static final String REGEX_REGIONS_ORIGIN_DESC = "The name used for the origin.";

    /** Name of regex regions start at file begin */
    public static final String REGEX_REGIONS_START_AT_FILE_BEGIN_NAME = "start-at-file-begin";

    /**
     * Description of regex regions start at file begin. The previous value was
     * copy-pasted from the origin description ("The name used for the origin.").
     */
    public static final String REGEX_REGIONS_START_AT_FILE_BEGIN_DESC = "Whether the first region starts at the beginning of the file.";

    /** Name of block marker patterns */
    public static final String BLOCK_MARKER_PATTERNS_NAME = "patterns";

    /** Description of block marker patterns */
    public static final String BLOCK_MARKER_PATTERNS_DESC = "Patterns that match block start. Each pattern must end with '\\{' to make sure "
            + "that it matches a block start.";

    /** Name of pattern list parameter */
    public static final String PATTERN_LIST = "pattern-list";

    /** Description of pattern list parameter */
    public static final String PATTERN_LIST_DESC = "List of patterns.";

    /** Finding parameter. */
    public static final String FINDING_NAME = "name";

    /** Finding key attribute. */
    public static final String FINDING_KEY_NAME = "key";

    /** Finding key description. */
    public static final String FINDING_KEY_DESC = "The key used for storing the findings in.";

    /** Name of parameter that determines whether to draw legend */
    public static final String DRAW_LEGEND_PARAM = "legend";

    /** Name of attribute that determines whether to draw legend */
    public static final String DRAW_LEGEND_ATTRIBUTE = "draw";

    /** Description of parameter */
    public static final String DRAW_LEGEND_DESC = "Flag that determines whether or not to draw a legend. Default is true.";

    /** Name of finding parameter */
    public static final String FINDING_PARAM_NAME = "finding";

    /** Parameter for finding group */
    public static final String FINDING_GROUP_NAME = "group";

    /** Parameter for finding category */
    public static final String FINDING_CATEGORY_NAME = "category";

    /** Parameter for finding message */
    public static final String FINDING_MESSAGE_NAME = "message";

    /** Parameter name for path transformations. */
    public static final String PATH_TRANSFORMATION_PARAM = "path-transformation";

    /** Attribute name for path transformations. */
    public static final String PATH_TRANSFORMATION_ATTRIBUTE = INPUT_REF_NAME;

    /**
     * Description for path transformations. Note that this is specific for
     * diffing processors as this mentions a comparee.
     */
    public static final String PATH_TRANSFORMATION_DESCRIPTION = "If this parameter is set, the transformation is applied to the uniform paths of the comparee elements before matching elements.";

    /** Parameter doc for finding.key */
    public static final String FINDING_KEYS_PARAM_DOC = "Adds a key under which to search for findings. "
            + "If no keys are given, all keys from the display list will be searched.";

    /** Parameter name for filter inversion */
    public static final String INVERT_NAME = "invert";

    /** Attribute value for value parameter. */
    public static final String INVERT_VALUE_NAME = "value";

    /** Parameter doc for filter inversion */
    public static final String INVERT_PARAM_DOC = "If set to true, filter is inverted. Default: false.";

    /** The default name of the build configuration */
    public static final String DEFAULT_CONFIGURATION_NAME = "Debug";

    /** The default description for the build configuration name */
    public static final String DEFAULT_CONFIGURATION_NAME_DESC = "Name of the configuration, e.g. 'Debug' or 'Release'";

    /** The platform used by default */
    public static final String DEFAULT_PLATFORM = "AnyCPU";

    /** The default description for the platform used by default */
    public static final String DEFAULT_PLATFORM_DESC = "Name of the platform, e.g. 'AnyCPU'";

    /** Default name for primitive values. */
    public static final String ATTRIBUTE_VALUE_NAME = "value";

    /** Log level parameter. */
    public static final String LOG_LEVEL_NAME = "log-level";

    /** Log level description. */
    public static final String LOG_LEVEL_DESCRIPTION = "This allows to specify the log level used for logging messages. Use OFF to turn off logging.";

    /** Name of the ignore parameter. */
    public static final String IGNORE_NAME = "ignore";

    /** Description of the ignore parameter. */
    public static final String IGNORE_DESC = "Key under which a ignore flag is stored.";

    /** Name of the ignore key. */
    public static final String IGNORE_KEY_NAME = "key";

    /** Description of the ignore key. */
    public static final String IGNORE_KEY_DESC = "If no key is given, no elements are ignored.";

    /** Name of the value key */
    public static final String VALUE_KEY_NAME = "value";

    /** Description of key in which string value of an object is stored */
    public static final String STRING_VALUE_KEY_DESC = "String representation of value, e.g. 5 for an integer";

    /** Name of type key */
    public static final String TYPE_KEY_NAME = "type";

    /** Description of type key */
    public static final String TYPE_KEY_DESC = "Type of value (e.g. java.lang.String)";

    /** Name of inclusion predicate parameter */
    public static final String INCLUSION_PREDICATE_PARAM = "inclusion";

    /** Name of inclusion predicate attribute */
    public static final String INCLUSION_PREDICATE_ATTRIBUTE = "predicate";

    /** Description of inclusion predicate parameter */
    public static final String INCLUSION_PREDICATE_DESC = "If set, only nodes that are contained in the predicate are processed.";
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.elasticsearch;

import static org.apache.nifi.flowfile.attributes.CoreAttributes.MIME_TYPE;

import com.fasterxml.jackson.databind.JsonNode;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import okhttp3.HttpUrl;
import okhttp3.OkHttpClient;
import okhttp3.Response;
import okhttp3.ResponseBody;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.EventDriven;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED)
@EventDriven
@SupportsBatching
@Tags({ "elasticsearch", "query", "read", "get", "http" })
@CapabilityDescription("Queries Elasticsearch using the specified connection properties. "
        + "Note that the full body of each page of documents will be read into memory before being "
        + "written to Flow Files for transfer. Also note that the Elasticsearch max_result_window index "
        + "setting is the upper bound on the number of records that can be retrieved using this query. "
        + "To retrieve more records, use the ScrollElasticsearchHttp processor.")
@WritesAttributes({
        @WritesAttribute(attribute = "filename", description = "The filename attribute is set to the document identifier"),
        @WritesAttribute(attribute = "es.query.hitcount", description = "The number of hits for a query"),
        @WritesAttribute(attribute = "es.id", description = "The Elasticsearch document identifier"),
        @WritesAttribute(attribute = "es.index", description = "The Elasticsearch index containing the document"),
        @WritesAttribute(attribute = "es.query.url", description = "The Elasticsearch query that was built"),
        @WritesAttribute(attribute = "es.type", description = "The Elasticsearch document type"),
        @WritesAttribute(attribute = "es.result.*", description = "If Target is 'Flow file attributes', the JSON attributes of "
                + "each result will be placed into corresponding attributes with this prefix.") })
@DynamicProperty(
        name = "A URL query parameter",
        value = "The value to set it to",
        expressionLanguageScope = ExpressionLanguageScope.VARIABLE_REGISTRY,
        description = "Adds the specified property name/value as a query parameter in the Elasticsearch URL used for processing")
public class QueryElasticsearchHttp extends AbstractElasticsearchHttpProcessor {

    /**
     * Controls when a FlowFile carrying query metadata (URL, hit count) is sent to
     * {@link #REL_QUERY_INFO}: never, always, or only when the query returned no hits.
     */
    public enum QueryInfoRouteStrategy {
        NEVER,
        ALWAYS,
        NOHIT
    }

    // Name of the Elasticsearch "from" pagination query parameter.
    private static final String FROM_QUERY_PARAM = "from";

    public static final String TARGET_FLOW_FILE_CONTENT = "Flow file content";
    public static final String TARGET_FLOW_FILE_ATTRIBUTES = "Flow file attributes";
    // Prefix applied to per-field attributes when Target is "Flow file attributes".
    private static final String ATTRIBUTE_PREFIX = "es.result.";

    static final AllowableValue ALWAYS = new AllowableValue(QueryInfoRouteStrategy.ALWAYS.name(), "Always",
            "Always route Query Info");
    static final AllowableValue NEVER = new AllowableValue(QueryInfoRouteStrategy.NEVER.name(), "Never",
            "Never route Query Info");
    static final AllowableValue NO_HITS = new AllowableValue(QueryInfoRouteStrategy.NOHIT.name(), "No Hits",
            "Route Query Info if the Query returns no hits");

    public static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("All FlowFiles that are read from Elasticsearch are routed to this relationship.")
            .build();

    public static final Relationship REL_FAILURE = new Relationship.Builder()
            .name("failure")
            .description("All FlowFiles that cannot be read from Elasticsearch are routed to this relationship. Note that only incoming "
                    + "flow files will be routed to failure.")
            .build();

    public static final Relationship REL_RETRY = new Relationship.Builder()
            .name("retry")
            .description("A FlowFile is routed to this relationship if the document cannot be fetched but attempting the operation again may "
                    + "succeed. Note that if the processor has no incoming connections, flow files may still be sent to this relationship "
                    + "based on the processor properties and the results of the fetch operation.")
            .build();

    public static final Relationship REL_QUERY_INFO = new Relationship.Builder()
            .name("query-info")
            .description("Depending on the setting of the Routing Strategy for Query Info property, a FlowFile is routed to this relationship with "
                    + "the incoming FlowFile's attributes (if present), the number of hits, and the Elasticsearch query")
            .build();

    public static final PropertyDescriptor QUERY = new PropertyDescriptor.Builder()
            .name("query-es-query")
            .displayName("Query")
            .description("The Lucene-style query to run against ElasticSearch (e.g., genre:blues AND -artist:muddy)")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor INDEX = new PropertyDescriptor.Builder()
            .name("query-es-index")
            .displayName("Index")
            .description("The name of the index to read from. If the property is set "
                    + "to _all, the query will match across all indexes.")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor TYPE = new PropertyDescriptor.Builder()
            .name("query-es-type")
            .displayName("Type")
            .description("The (optional) type of this query, used by Elasticsearch for indexing and searching. If the property is empty, "
                    + "the query will match across all types.")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor FIELDS = new PropertyDescriptor.Builder()
            .name("query-es-fields")
            .displayName("Fields")
            .description("A comma-separated list of fields to retrieve from the document. If the Fields property is left blank, "
                    + "then the entire document's source will be retrieved.")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor SORT = new PropertyDescriptor.Builder()
            .name("query-es-sort")
            .displayName("Sort")
            .description("A sort parameter (e.g., timestamp:asc). If the Sort property is left blank, "
                    + "then the results will be retrieved in document order.")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor PAGE_SIZE = new PropertyDescriptor.Builder()
            .name("query-es-size")
            .displayName("Page Size")
            .defaultValue("20")
            .description("Determines how many documents to return per page during scrolling.")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
            .build();

    public static final PropertyDescriptor LIMIT = new PropertyDescriptor.Builder()
            .name("query-es-limit")
            .displayName("Limit")
            .description("If set, limits the number of results that will be returned.")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
            .build();

    public static final PropertyDescriptor TARGET = new PropertyDescriptor.Builder()
            .name("query-es-target")
            .displayName("Target")
            .description("Indicates where the results should be placed. In the case of 'Flow file content', the JSON "
                    + "response will be written as the content of the flow file. In the case of 'Flow file attributes', "
                    + "the original flow file (if applicable) will be cloned for each result, and all return fields will be placed "
                    + "in a flow file attribute of the same name, but prefixed by 'es.result.'")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .defaultValue(TARGET_FLOW_FILE_CONTENT)
            .allowableValues(TARGET_FLOW_FILE_CONTENT, TARGET_FLOW_FILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    public static final PropertyDescriptor ROUTING_QUERY_INFO_STRATEGY = new PropertyDescriptor.Builder()
            .name("routing-query-info-strategy")
            .displayName("Routing Strategy for Query Info")
            .description("Specifies when to generate and route Query Info after a successful query")
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .allowableValues(ALWAYS, NEVER, NO_HITS)
            .defaultValue(NEVER.getValue())
            .required(false)
            .build();

    // volatile: the relationship set is swapped wholesale in onPropertyModified()
    // (framework thread) and read by getRelationships() (other framework threads).
    private volatile Set<Relationship> relationships =
            new HashSet<>(Arrays.asList(REL_SUCCESS, REL_FAILURE, REL_RETRY));

    private static final List<PropertyDescriptor> propertyDescriptors;

    private QueryInfoRouteStrategy queryInfoRouteStrategy = QueryInfoRouteStrategy.NEVER;

    static {
        final List<PropertyDescriptor> descriptors = new ArrayList<>(COMMON_PROPERTY_DESCRIPTORS);
        descriptors.add(QUERY);
        descriptors.add(PAGE_SIZE);
        descriptors.add(INDEX);
        descriptors.add(TYPE);
        descriptors.add(FIELDS);
        descriptors.add(SORT);
        descriptors.add(LIMIT);
        descriptors.add(TARGET);
        descriptors.add(ROUTING_QUERY_INFO_STRATEGY);
        propertyDescriptors = Collections.unmodifiableList(descriptors);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return relationships;
    }

    @Override
    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return propertyDescriptors;
    }

    @OnScheduled
    public void setup(ProcessContext context) {
        super.setup(context);
    }

    /**
     * Recomputes the relationship set (adding/removing {@link #REL_QUERY_INFO}) and the
     * routing strategy whenever the Routing Strategy for Query Info property changes.
     */
    @Override
    public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue,
            final String newValue) {
        if (ROUTING_QUERY_INFO_STRATEGY.equals(descriptor)) {
            final Set<Relationship> relationshipSet = new HashSet<>();
            relationshipSet.add(REL_SUCCESS);
            relationshipSet.add(REL_FAILURE);
            relationshipSet.add(REL_RETRY);

            if (ALWAYS.getValue().equalsIgnoreCase(newValue) || NO_HITS.getValue().equalsIgnoreCase(newValue)) {
                relationshipSet.add(REL_QUERY_INFO);
            }
            // Guard against NPE from valueOf(null): newValue is null when the property is
            // removed; fall back to the default strategy in that case.
            this.queryInfoRouteStrategy = newValue == null
                    ? QueryInfoRouteStrategy.NEVER
                    : QueryInfoRouteStrategy.valueOf(newValue);
            this.relationships = relationshipSet;
        }
    }

    /**
     * Runs the configured Lucene query against Elasticsearch, paging through results via the
     * from/size parameters until no more hits are returned or the configured Limit is reached.
     * The incoming FlowFile (if any) provides attribute-expression context and is removed on
     * success; on error it is routed to retry (I/O or 5xx) or failure (other errors).
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session)
            throws ProcessException {
        FlowFile flowFile = null;
        if (context.hasIncomingConnection()) {
            flowFile = session.get();

            // If we have no FlowFile and all incoming connections are self-loops we can still
            // run; if any connection comes from another processor, we only run with a FlowFile.
            if (flowFile == null && context.hasNonLoopConnection()) {
                return;
            }
        }

        OkHttpClient okHttpClient = getClient();

        final String index = context.getProperty(INDEX).evaluateAttributeExpressions(flowFile).getValue();
        final String query = context.getProperty(QUERY).evaluateAttributeExpressions(flowFile).getValue();
        final String docType = context.getProperty(TYPE).evaluateAttributeExpressions(flowFile).getValue();
        final int pageSize = context.getProperty(PAGE_SIZE).evaluateAttributeExpressions(flowFile).asInteger();
        final Integer limit = context.getProperty(LIMIT).isSet()
                ? context.getProperty(LIMIT).evaluateAttributeExpressions(flowFile).asInteger()
                : null;
        final String fields = context.getProperty(FIELDS).isSet()
                ? context.getProperty(FIELDS).evaluateAttributeExpressions(flowFile).getValue()
                : null;
        final String sort = context.getProperty(SORT).isSet()
                ? context.getProperty(SORT).evaluateAttributeExpressions(flowFile).getValue()
                : null;
        final boolean targetIsContent = context.getProperty(TARGET).getValue().equals(TARGET_FLOW_FILE_CONTENT);

        // Authentication
        final String username = context.getProperty(USERNAME).evaluateAttributeExpressions().getValue();
        final String password = context.getProperty(PASSWORD).evaluateAttributeExpressions().getValue();

        final ComponentLog logger = getLogger();

        int fromIndex = 0;
        int numResults = 0;

        try {
            logger.debug("Querying {}/{} from Elasticsearch: {}", new Object[] { index, docType, query });

            final long startNanos = System.nanoTime();
            final String urlstr = StringUtils.trimToEmpty(
                    context.getProperty(ES_URL).evaluateAttributeExpressions().getValue());

            boolean hitLimit = false;
            do {
                // Shrink the final page so we never fetch past the configured Limit.
                int mPageSize = pageSize;
                if (limit != null && limit <= (fromIndex + pageSize)) {
                    mPageSize = limit - fromIndex;
                    hitLimit = true;
                }

                final URL queryUrl = buildRequestURL(urlstr, query, index, docType, fields, sort,
                        mPageSize, fromIndex, context);
                final Response getResponse = sendRequestToElasticsearch(okHttpClient, queryUrl,
                        username, password, "GET", null);
                // Close the response even when getPage throws, otherwise the OkHttp
                // response body (and its connection) leaks.
                try {
                    numResults = getPage(getResponse, queryUrl, context, session, flowFile,
                            logger, startNanos, targetIsContent, numResults);
                } finally {
                    getResponse.close();
                }
                fromIndex += pageSize;
            } while (numResults > 0 && !hitLimit);

            if (flowFile != null) {
                session.remove(flowFile);
            }
        } catch (IOException ioe) {
            logger.error(
                    "Failed to read from Elasticsearch due to {}, this may indicate an error in configuration "
                            + "(hosts, username/password, etc.). Routing to retry",
                    new Object[] { ioe.getLocalizedMessage() }, ioe);
            if (flowFile != null) {
                session.transfer(flowFile, REL_RETRY);
            }
            context.yield();
        } catch (RetryableException e) {
            // The message has no placeholders, so pass only the throwable for the stack trace.
            logger.error(e.getMessage(), e);
            if (flowFile != null) {
                session.transfer(flowFile, REL_RETRY);
            }
            context.yield();
        } catch (Exception e) {
            logger.error("Failed to read {} from Elasticsearch due to {}",
                    new Object[] { flowFile, e.getLocalizedMessage() }, e);
            if (flowFile != null) {
                session.transfer(flowFile, REL_FAILURE);
            }
            context.yield();
        }
    }

    /**
     * Processes one page of query results.
     *
     * <p>On a 2xx response, emits one FlowFile per hit to {@link #REL_SUCCESS} (document source
     * as content, or source fields as {@code es.result.*} attributes, per the Target property),
     * optionally emitting a query-info FlowFile first per the routing strategy. On a 5xx response
     * throws {@link RetryableException}; on other non-success codes either throws
     * {@link UnretryableException} (when there is an incoming connection) or just logs a warning.
     *
     * @param getResponse      the HTTP response for this page (closed by the caller)
     * @param url              the query URL, recorded in attributes and provenance
     * @param flowFile         the incoming FlowFile, or {@code null} when source-triggered
     * @param priorResultCount total hits seen in earlier pages of this run; used to decide
     *                         whether a NOHIT query-info FlowFile should be emitted
     * @return the number of hits in this page (0 terminates the caller's paging loop)
     * @throws IOException if the response body cannot be read or parsed
     */
    private int getPage(final Response getResponse, final URL url, final ProcessContext context,
            final ProcessSession session, FlowFile flowFile, final ComponentLog logger,
            final long startNanos, boolean targetIsContent, int priorResultCount)
            throws IOException {
        List<FlowFile> page = new ArrayList<>();
        final int statusCode = getResponse.code();

        if (isSuccess(statusCode)) {
            ResponseBody body = getResponse.body();
            final byte[] bodyBytes = body.bytes();
            JsonNode responseJson = parseJsonResponse(new ByteArrayInputStream(bodyBytes));
            JsonNode hits = responseJson.get("hits").get("hits");

            // If there are no hits, there were none in earlier pages (priorResultCount), and the
            // strategy is NOHIT — or the strategy is ALWAYS — send the query info.
            if ((hits.size() == 0 && priorResultCount == 0 && queryInfoRouteStrategy == QueryInfoRouteStrategy.NOHIT)
                    || queryInfoRouteStrategy == QueryInfoRouteStrategy.ALWAYS) {
                FlowFile queryInfo = flowFile == null ? session.create() : session.create(flowFile);
                // FlowFiles are immutable: always keep the reference returned by putAttribute,
                // otherwise the transferred FlowFile may be missing these attributes.
                queryInfo = session.putAttribute(queryInfo, "es.query.url", url.toExternalForm());
                queryInfo = session.putAttribute(queryInfo, "es.query.hitcount", String.valueOf(hits.size()));
                queryInfo = session.putAttribute(queryInfo, MIME_TYPE.key(), "application/json");
                session.transfer(queryInfo, REL_QUERY_INFO);
            }

            for (int i = 0; i < hits.size(); i++) {
                JsonNode hit = hits.get(i);
                String retrievedId = hit.get("_id").asText();
                String retrievedIndex = hit.get("_index").asText();
                String retrievedType = hit.get("_type").asText();

                FlowFile documentFlowFile;
                if (flowFile != null) {
                    documentFlowFile = targetIsContent ? session.create(flowFile) : session.clone(flowFile);
                } else {
                    documentFlowFile = session.create();
                }

                // NOTE(review): hits without a "_source" element (e.g. when only stored fields
                // are returned) would NPE below — confirm whether that can occur for this query.
                JsonNode source = hit.get("_source");
                documentFlowFile = session.putAttribute(documentFlowFile, "es.id", retrievedId);
                documentFlowFile = session.putAttribute(documentFlowFile, "es.index", retrievedIndex);
                documentFlowFile = session.putAttribute(documentFlowFile, "es.type", retrievedType);
                documentFlowFile = session.putAttribute(documentFlowFile, "es.query.url", url.toExternalForm());

                if (targetIsContent) {
                    documentFlowFile = session.putAttribute(documentFlowFile, "filename", retrievedId);
                    documentFlowFile = session.putAttribute(documentFlowFile, "mime.type", "application/json");
                    // Write as UTF-8 explicitly; the bare getBytes() used the platform charset,
                    // which can corrupt non-ASCII JSON on some systems.
                    documentFlowFile = session.write(documentFlowFile,
                            out -> out.write(source.toString().getBytes(StandardCharsets.UTF_8)));
                } else {
                    // Flatten each source field into an es.result.* attribute; array values are
                    // joined with commas.
                    Map<String, String> attributes = new HashMap<>();
                    for (Iterator<Entry<String, JsonNode>> it = source.fields(); it.hasNext();) {
                        Entry<String, JsonNode> entry = it.next();
                        String textValue;
                        if (entry.getValue().isArray()) {
                            List<String> textValues = new ArrayList<>();
                            for (Iterator<JsonNode> items = entry.getValue().iterator(); items.hasNext();) {
                                textValues.add(items.next().asText());
                            }
                            textValue = StringUtils.join(textValues, ',');
                        } else {
                            textValue = entry.getValue().asText();
                        }
                        attributes.put(ATTRIBUTE_PREFIX + entry.getKey(), textValue);
                    }
                    documentFlowFile = session.putAllAttributes(documentFlowFile, attributes);
                }
                page.add(documentFlowFile);
            }
            logger.debug("Elasticsearch retrieved " + responseJson.size() + " documents, routing to success");

            session.transfer(page, REL_SUCCESS);
        } else {
            try {
                // 5xx -> RETRY, but a server error might last a while, so yield
                if (statusCode / 100 == 5) {
                    throw new RetryableException(String.format(
                            "Elasticsearch returned code %s with message %s, transferring flow file to retry. This is likely a server problem, yielding...",
                            statusCode, getResponse.message()));
                } else if (context.hasIncomingConnection()) {
                    // 1xx, 3xx, 4xx -> NO RETRY
                    throw new UnretryableException(String.format(
                            "Elasticsearch returned code %s with message %s, transferring flow file to failure",
                            statusCode, getResponse.message()));
                } else {
                    logger.warn("Elasticsearch returned code {} with message {}",
                            new Object[] { statusCode, getResponse.message() });
                }
            } finally {
                // Drop any partially-built page before propagating the error.
                if (!page.isEmpty()) {
                    session.remove(page);
                    page.clear();
                }
            }
        }

        // emit provenance event
        final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
        if (!page.isEmpty()) {
            if (context.hasNonLoopConnection()) {
                page.forEach(f -> session.getProvenanceReporter().fetch(f, url.toExternalForm(), millis));
            } else {
                page.forEach(f -> session.getProvenanceReporter().receive(f, url.toExternalForm(), millis));
            }
        }
        return page.size();
    }

    /**
     * Builds the {@code _search} URL for one page of results: base/{index|_all}[/{type}]/_search
     * with q, size, and from parameters, optional field-include and sort parameters, and any
     * user-defined dynamic properties appended as extra query parameters.
     *
     * @throws MalformedURLException if {@code baseUrl} is empty
     */
    private URL buildRequestURL(String baseUrl, String query, String index, String type,
            String fields, String sort, int pageSize, int fromIndex, ProcessContext context)
            throws MalformedURLException {
        if (StringUtils.isEmpty(baseUrl)) {
            throw new MalformedURLException("Base URL cannot be null");
        }
        HttpUrl.Builder builder = HttpUrl.parse(baseUrl).newBuilder();
        builder.addPathSegment(StringUtils.isEmpty(index) ? "_all" : index);
        if (!StringUtils.isEmpty(type)) {
            builder.addPathSegment(type);
        }
        builder.addPathSegment("_search");
        builder.addQueryParameter(QUERY_QUERY_PARAM, query);
        builder.addQueryParameter(SIZE_QUERY_PARAM, String.valueOf(pageSize));
        builder.addQueryParameter(FROM_QUERY_PARAM, String.valueOf(fromIndex));
        if (!StringUtils.isEmpty(fields)) {
            String trimmedFields = Stream.of(fields.split(","))
                    .map(String::trim)
                    .collect(Collectors.joining(","));
            builder.addQueryParameter(FIELD_INCLUDE_QUERY_PARAM, trimmedFields);
        }
        if (!StringUtils.isEmpty(sort)) {
            String trimmedSort = Stream.of(sort.split(","))
                    .map(String::trim)
                    .collect(Collectors.joining(","));
            builder.addQueryParameter(SORT_QUERY_PARAM, trimmedSort);
        }

        // Find the user-added properties and set them as query parameters on the URL
        for (Map.Entry<PropertyDescriptor, String> property : context.getProperties().entrySet()) {
            PropertyDescriptor pd = property.getKey();
            if (pd.isDynamic() && property.getValue() != null) {
                builder.addQueryParameter(pd.getName(),
                        context.getProperty(pd).evaluateAttributeExpressions().getValue());
            }
        }

        return builder.build().url();
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.standard; import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Types; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.Caffeine; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.SideEffectFree; import org.apache.nifi.annotation.behavior.SupportsBatching; import org.apache.nifi.annotation.behavior.WritesAttribute; import org.apache.nifi.annotation.behavior.WritesAttributes; import org.apache.nifi.annotation.documentation.CapabilityDescription; import 
org.apache.nifi.annotation.documentation.SeeAlso; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.lifecycle.OnScheduled; import org.apache.nifi.components.AllowableValue; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.io.InputStreamCallback; import org.apache.nifi.processor.io.OutputStreamCallback; import org.apache.nifi.processor.util.StandardValidators; import org.codehaus.jackson.JsonNode; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.node.ArrayNode; import org.codehaus.jackson.node.JsonNodeFactory; import static org.apache.nifi.flowfile.attributes.FragmentAttributes.FRAGMENT_COUNT; import static org.apache.nifi.flowfile.attributes.FragmentAttributes.FRAGMENT_ID; import static org.apache.nifi.flowfile.attributes.FragmentAttributes.FRAGMENT_INDEX; import static org.apache.nifi.flowfile.attributes.FragmentAttributes.copyAttributesToOriginal; @SideEffectFree @SupportsBatching @SeeAlso(PutSQL.class) @InputRequirement(Requirement.INPUT_REQUIRED) @Tags({"json", "sql", "database", "rdbms", "insert", "update", "delete", "relational", "flat"}) @CapabilityDescription("Converts a JSON-formatted FlowFile into an UPDATE, INSERT, or DELETE SQL statement. The incoming FlowFile is expected to be " + "\"flat\" JSON message, meaning that it consists of a single JSON element and each field maps to a simple type. If a field maps to " + "a JSON object, that JSON object will be interpreted as Text. 
If the input is an array of JSON elements, each element in the array is " + "output as a separate FlowFile to the 'sql' relationship. Upon successful conversion, the original FlowFile is routed to the 'original' " + "relationship and the SQL is routed to the 'sql' relationship.") @WritesAttributes({ @WritesAttribute(attribute="mime.type", description="Sets mime.type of FlowFile that is routed to 'sql' to 'text/plain'."), @WritesAttribute(attribute = "<sql>.table", description = "Sets the <sql>.table attribute of FlowFile that is routed to 'sql' to the name of the table that is updated by the SQL statement. " + "The prefix for this attribute ('sql', e.g.) is determined by the SQL Parameter Attribute Prefix property."), @WritesAttribute(attribute="<sql>.catalog", description="If the Catalog name is set for this database, specifies the name of the catalog that the SQL statement will update. " + "If no catalog is used, this attribute will not be added. The prefix for this attribute ('sql', e.g.) is determined by the SQL Parameter Attribute Prefix property."), @WritesAttribute(attribute="fragment.identifier", description="All FlowFiles routed to the 'sql' relationship for the same incoming FlowFile (multiple will be output for the same incoming " + "FlowFile if the incoming FlowFile is a JSON Array) will have the same value for the fragment.identifier attribute. This can then be used to correlate the results."), @WritesAttribute(attribute="fragment.count", description="The number of SQL FlowFiles that were produced for same incoming FlowFile. This can be used in conjunction with the " + "fragment.identifier attribute in order to know how many FlowFiles belonged to the same incoming FlowFile."), @WritesAttribute(attribute="fragment.index", description="The position of this FlowFile in the list of outgoing FlowFiles that were all derived from the same incoming FlowFile. 
This can be " + "used in conjunction with the fragment.identifier and fragment.count attributes to know which FlowFiles originated from the same incoming FlowFile and in what order the SQL " + "FlowFiles were produced"), @WritesAttribute(attribute="<sql>.args.N.type", description="The output SQL statements are parametrized in order to avoid SQL Injection Attacks. The types of the Parameters " + "to use are stored in attributes named <sql>.args.1.type, <sql>.args.2.type, <sql>.args.3.type, and so on. The type is a number representing a JDBC Type constant. " + "Generally, this is useful only for software to read and interpret but is added so that a processor such as PutSQL can understand how to interpret the values. " + "The prefix for this attribute ('sql', e.g.) is determined by the SQL Parameter Attribute Prefix property."), @WritesAttribute(attribute="<sql>.args.N.value", description="The output SQL statements are parametrized in order to avoid SQL Injection Attacks. The values of the Parameters " + "to use are stored in the attributes named sql.args.1.value, sql.args.2.value, sql.args.3.value, and so on. Each of these attributes has a corresponding " + "<sql>.args.N.type attribute that indicates how the value should be interpreted when inserting it into the database." + "The prefix for this attribute ('sql', e.g.) 
is determined by the SQL Parameter Attribute Prefix property.") }) public class ConvertJSONToSQL extends AbstractProcessor { private static final String UPDATE_TYPE = "UPDATE"; private static final String INSERT_TYPE = "INSERT"; private static final String DELETE_TYPE = "DELETE"; static final AllowableValue IGNORE_UNMATCHED_FIELD = new AllowableValue("Ignore Unmatched Fields", "Ignore Unmatched Fields", "Any field in the JSON document that cannot be mapped to a column in the database is ignored"); static final AllowableValue FAIL_UNMATCHED_FIELD = new AllowableValue("Fail", "Fail", "If the JSON document has any field that cannot be mapped to a column in the database, the FlowFile will be routed to the failure relationship"); static final AllowableValue IGNORE_UNMATCHED_COLUMN = new AllowableValue("Ignore Unmatched Columns", "Ignore Unmatched Columns", "Any column in the database that does not have a field in the JSON document will be assumed to not be required. No notification will be logged"); static final AllowableValue WARNING_UNMATCHED_COLUMN = new AllowableValue("Warn on Unmatched Columns", "Warn on Unmatched Columns", "Any column in the database that does not have a field in the JSON document will be assumed to not be required. A warning will be logged"); static final AllowableValue FAIL_UNMATCHED_COLUMN = new AllowableValue("Fail on Unmatched Columns", "Fail on Unmatched Columns", "A flow will fail if any column in the database that does not have a field in the JSON document. An error will be logged"); static final PropertyDescriptor CONNECTION_POOL = new PropertyDescriptor.Builder() .name("JDBC Connection Pool") .description("Specifies the JDBC Connection Pool to use in order to convert the JSON message to a SQL statement. 
" + "The Connection Pool is necessary in order to determine the appropriate database column types.") .identifiesControllerService(DBCPService.class) .required(true) .build(); static final PropertyDescriptor STATEMENT_TYPE = new PropertyDescriptor.Builder() .name("Statement Type") .description("Specifies the type of SQL Statement to generate") .required(true) .allowableValues(UPDATE_TYPE, INSERT_TYPE, DELETE_TYPE) .build(); static final PropertyDescriptor TABLE_NAME = new PropertyDescriptor.Builder() .name("Table Name") .description("The name of the table that the statement should update") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); static final PropertyDescriptor CATALOG_NAME = new PropertyDescriptor.Builder() .name("Catalog Name") .description("The name of the catalog that the statement should update. This may not apply for the database that you are updating. In this case, leave the field empty") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); static final PropertyDescriptor SCHEMA_NAME = new PropertyDescriptor.Builder() .name("Schema Name") .description("The name of the schema that the table belongs to. This may not apply for the database that you are updating. In this case, leave the field empty") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); static final PropertyDescriptor TRANSLATE_FIELD_NAMES = new PropertyDescriptor.Builder() .name("Translate Field Names") .description("If true, the Processor will attempt to translate JSON field names into the appropriate column names for the table specified. 
" + "If false, the JSON field names must match the column names exactly, or the column will not be updated") .allowableValues("true", "false") .defaultValue("true") .build(); static final PropertyDescriptor UNMATCHED_FIELD_BEHAVIOR = new PropertyDescriptor.Builder() .name("Unmatched Field Behavior") .description("If an incoming JSON element has a field that does not map to any of the database table's columns, this property specifies how to handle the situation") .allowableValues(IGNORE_UNMATCHED_FIELD, FAIL_UNMATCHED_FIELD) .defaultValue(IGNORE_UNMATCHED_FIELD.getValue()) .build(); static final PropertyDescriptor UNMATCHED_COLUMN_BEHAVIOR = new PropertyDescriptor.Builder() .name("Unmatched Column Behavior") .description("If an incoming JSON element does not have a field mapping for all of the database table's columns, this property specifies how to handle the situation") .allowableValues(IGNORE_UNMATCHED_COLUMN, WARNING_UNMATCHED_COLUMN ,FAIL_UNMATCHED_COLUMN) .defaultValue(FAIL_UNMATCHED_COLUMN.getValue()) .build(); static final PropertyDescriptor UPDATE_KEY = new PropertyDescriptor.Builder() .name("Update Keys") .description("A comma-separated list of column names that uniquely identifies a row in the database for UPDATE statements. " + "If the Statement Type is UPDATE and this property is not set, the table's Primary Keys are used. " + "In this case, if no Primary Key exists, the conversion to SQL will fail if Unmatched Column Behaviour is set to FAIL. 
" + "This property is ignored if the Statement Type is INSERT") .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .required(false) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); static final PropertyDescriptor QUOTED_IDENTIFIERS = new PropertyDescriptor.Builder() .name("jts-quoted-identifiers") .displayName("Quote Column Identifiers") .description("Enabling this option will cause all column names to be quoted, allowing you to " + "use reserved words as column names in your tables.") .allowableValues("true", "false") .defaultValue("false") .build(); static final PropertyDescriptor QUOTED_TABLE_IDENTIFIER = new PropertyDescriptor.Builder() .name("jts-quoted-table-identifiers") .displayName("Quote Table Identifiers") .description("Enabling this option will cause the table name to be quoted to support the " + "use of special characters in the table name") .allowableValues("true", "false") .defaultValue("false") .build(); static final PropertyDescriptor SQL_PARAM_ATTR_PREFIX = new PropertyDescriptor.Builder() .name("jts-sql-param-attr-prefix") .displayName("SQL Parameter Attribute Prefix") .description("The string to be prepended to the outgoing flow file attributes, such as <sql>.args.1.value, where <sql> is replaced with the specified value") .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(StandardValidators.NON_EMPTY_EL_VALIDATOR) .required(true) .defaultValue("sql") .build(); static final PropertyDescriptor TABLE_SCHEMA_CACHE_SIZE = new PropertyDescriptor.Builder() .name("table-schema-cache-size") .displayName("Table Schema Cache Size") .description("Specifies how many Table Schemas should be cached") .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR) .defaultValue("100") .required(true) .build(); static final Relationship REL_ORIGINAL = new Relationship.Builder() .name("original") .description("When a FlowFile is converted to SQL, the original JSON FlowFile is routed to 
this relationship") .build(); static final Relationship REL_SQL = new Relationship.Builder() .name("sql") .description("A FlowFile is routed to this relationship when its contents have successfully been converted into a SQL statement") .build(); static final Relationship REL_FAILURE = new Relationship.Builder() .name("failure") .description("A FlowFile is routed to this relationship if it cannot be converted into a SQL statement. Common causes include invalid JSON " + "content or the JSON content missing a required field (if using an INSERT statement type).") .build(); private Cache<SchemaKey, TableSchema> schemaCache; @Override protected List<PropertyDescriptor> getSupportedPropertyDescriptors() { final List<PropertyDescriptor> properties = new ArrayList<>(); properties.add(CONNECTION_POOL); properties.add(STATEMENT_TYPE); properties.add(TABLE_NAME); properties.add(CATALOG_NAME); properties.add(SCHEMA_NAME); properties.add(TRANSLATE_FIELD_NAMES); properties.add(UNMATCHED_FIELD_BEHAVIOR); properties.add(UNMATCHED_COLUMN_BEHAVIOR); properties.add(UPDATE_KEY); properties.add(QUOTED_IDENTIFIERS); properties.add(QUOTED_TABLE_IDENTIFIER); properties.add(SQL_PARAM_ATTR_PREFIX); properties.add(TABLE_SCHEMA_CACHE_SIZE); return properties; } @Override public Set<Relationship> getRelationships() { final Set<Relationship> rels = new HashSet<>(); rels.add(REL_ORIGINAL); rels.add(REL_SQL); rels.add(REL_FAILURE); return rels; } @OnScheduled public void onScheduled(final ProcessContext context) { final int tableSchemaCacheSize = context.getProperty(TABLE_SCHEMA_CACHE_SIZE).asInteger(); schemaCache = Caffeine.newBuilder() .maximumSize(tableSchemaCacheSize) .build(); } @Override public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException { final FlowFile flowFile = session.get(); if (flowFile == null) { return; } final boolean translateFieldNames = context.getProperty(TRANSLATE_FIELD_NAMES).asBoolean(); final boolean 
ignoreUnmappedFields = IGNORE_UNMATCHED_FIELD.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_FIELD_BEHAVIOR).getValue());
        final String statementType = context.getProperty(STATEMENT_TYPE).getValue();
        final String updateKeys = context.getProperty(UPDATE_KEY).evaluateAttributeExpressions(flowFile).getValue();
        final String catalog = context.getProperty(CATALOG_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final SchemaKey schemaKey = new SchemaKey(catalog, tableName);
        // Primary keys are only fetched when an UPDATE has no explicit Update Keys configured.
        final boolean includePrimaryKeys = UPDATE_TYPE.equals(statementType) && updateKeys == null;

        // Is the unmatched column behaviour fail or warning?
        final boolean failUnmappedColumns = FAIL_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());
        final boolean warningUnmappedColumns = WARNING_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());

        // Escape column names?
        final boolean escapeColumnNames = context.getProperty(QUOTED_IDENTIFIERS).asBoolean();

        // Quote table name?
        final boolean quoteTableName = context.getProperty(QUOTED_TABLE_IDENTIFIER).asBoolean();

        // Attribute prefix
        final String attributePrefix = context.getProperty(SQL_PARAM_ATTR_PREFIX).evaluateAttributeExpressions(flowFile).getValue();

        // Look up (or lazily build) the table schema; a DB connection is only taken on a cache miss.
        TableSchema schema;
        try {
            schema = schemaCache.get(schemaKey, key -> {
                final DBCPService dbcpService = context.getProperty(CONNECTION_POOL).asControllerService(DBCPService.class);
                try (final Connection conn = dbcpService.getConnection(flowFile.getAttributes())) {
                    return TableSchema.from(conn, catalog, schemaName, tableName, translateFieldNames, includePrimaryKeys);
                } catch (final SQLException e) {
                    throw new ProcessException(e);
                }
            });
        } catch (ProcessException e) {
            getLogger().error("Failed to convert {} into a SQL statement due to {}; routing to failure", new Object[]{flowFile, e.toString()}, e);
            session.transfer(flowFile, REL_FAILURE);
            return;
        }

        // Parse the JSON document
        final ObjectMapper mapper = new ObjectMapper();
        final AtomicReference<JsonNode> rootNodeRef = new AtomicReference<>(null);
        try {
            session.read(flowFile, new InputStreamCallback() {
                @Override
                public void process(final InputStream in) throws IOException {
                    try (final InputStream bufferedIn = new BufferedInputStream(in)) {
                        rootNodeRef.set(mapper.readTree(bufferedIn));
                    }
                }
            });
        } catch (final ProcessException pe) {
            getLogger().error("Failed to parse {} as JSON due to {}; routing to failure", new Object[] {flowFile, pe.toString()}, pe);
            session.transfer(flowFile, REL_FAILURE);
            return;
        }

        final JsonNode rootNode = rootNodeRef.get();

        // The node may or may not be a Json Array. If it isn't, we will create an
        // ArrayNode and add just the root node to it. We do this so that we can easily iterate
        // over the array node, rather than duplicating the logic or creating another function that takes many variables
        // in order to implement the logic.
        final ArrayNode arrayNode;
        if (rootNode.isArray()) {
            arrayNode = (ArrayNode) rootNode;
        } else {
            final JsonNodeFactory nodeFactory = JsonNodeFactory.instance;
            arrayNode = new ArrayNode(nodeFactory);
            arrayNode.add(rootNode);
        }

        // All generated FlowFiles share one fragment id so downstream processors can correlate them.
        final String fragmentIdentifier = UUID.randomUUID().toString();

        final Set<FlowFile> created = new HashSet<>();
        for (int i = 0; i < arrayNode.size(); i++) {
            final JsonNode jsonNode = arrayNode.get(i);
            final String sql;
            final Map<String, String> attributes = new HashMap<>();

            try {
                // build the fully qualified table name
                final StringBuilder tableNameBuilder = new StringBuilder();
                if (catalog != null) {
                    tableNameBuilder.append(catalog).append(".");
                }
                if (schemaName != null) {
                    tableNameBuilder.append(schemaName).append(".");
                }
                tableNameBuilder.append(tableName);
                final String fqTableName = tableNameBuilder.toString();

                // Dispatch on statement type; anything that is not INSERT or UPDATE falls through to DELETE.
                if (INSERT_TYPE.equals(statementType)) {
                    sql = generateInsert(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields,
                            failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
                } else if (UPDATE_TYPE.equals(statementType)) {
                    sql = generateUpdate(jsonNode, attributes, fqTableName, updateKeys, schema, translateFieldNames, ignoreUnmappedFields,
                            failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
                } else {
                    sql = generateDelete(jsonNode, attributes, fqTableName, schema, translateFieldNames, ignoreUnmappedFields,
                            failUnmappedColumns, warningUnmappedColumns, escapeColumnNames, quoteTableName, attributePrefix);
                }
            } catch (final ProcessException pe) {
                // One bad element fails the whole FlowFile: drop any sibling SQL FlowFiles already created.
                getLogger().error("Failed to convert {} to a SQL {} statement due to {}; routing to failure",
                        new Object[] { flowFile, statementType, pe.toString() }, pe);
                session.remove(created);
                session.transfer(flowFile, REL_FAILURE);
                return;
            }

            FlowFile sqlFlowFile = session.create(flowFile);
            created.add(sqlFlowFile);

            sqlFlowFile = session.write(sqlFlowFile, new OutputStreamCallback() {
                @Override
throw new ProcessException(missingColMessage);
                } else if (warningUnmappedColumns) {
                    getLogger().warn(missingColMessage);
                }
            }
        }

        final StringBuilder sqlBuilder = new StringBuilder();
        int fieldCount = 0;
        sqlBuilder.append("INSERT INTO ");
        if (quoteTableName) {
            sqlBuilder.append(schema.getQuotedIdentifierString())
                    .append(tableName)
                    .append(schema.getQuotedIdentifierString());
        } else {
            sqlBuilder.append(tableName);
        }
        sqlBuilder.append(" (");

        // iterate over all of the elements in the JSON, building the SQL statement by adding the column names, as well as
        // adding the column value to a "<sql>.args.N.value" attribute and the type of a "<sql>.args.N.type" attribute add the
        // columns that we are inserting into
        final Iterator<String> fieldNames = rootNode.getFieldNames();
        while (fieldNames.hasNext()) {
            final String fieldName = fieldNames.next();

            final ColumnDescription desc = schema.getColumns().get(normalizeColumnName(fieldName, translateFieldNames));
            if (desc == null && !ignoreUnmappedFields) {
                throw new ProcessException("Cannot map JSON field '" + fieldName + "' to any column in the database");
            }

            if (desc != null) {
                if (fieldCount++ > 0) {
                    sqlBuilder.append(", ");
                }

                if (escapeColumnNames) {
                    sqlBuilder.append(schema.getQuotedIdentifierString())
                            .append(desc.getColumnName())
                            .append(schema.getQuotedIdentifierString());
                } else {
                    sqlBuilder.append(desc.getColumnName());
                }

                // Argument attributes are 1-based: fieldCount was already incremented above.
                final int sqlType = desc.getDataType();
                attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType));

                final Integer colSize = desc.getColumnSize();
                final JsonNode fieldNode = rootNode.get(fieldName);
                if (!fieldNode.isNull()) {
                    // JSON null omits the ".value" attribute entirely; only the ".type" attribute is set.
                    String fieldValue = createSqlStringValue(fieldNode, colSize, sqlType);
                    attributes.put(attributePrefix + ".args." + fieldCount + ".value", fieldValue);
                }
            }
        }

        // complete the SQL statements by adding ?'s for all of the values to be escaped.
        sqlBuilder.append(") VALUES (");
        for (int i = 0; i < fieldCount; i++) {
            if (i > 0) {
                sqlBuilder.append(", ");
            }
            sqlBuilder.append("?");
        }
        sqlBuilder.append(")");

        if (fieldCount == 0) {
            throw new ProcessException("None of the fields in the JSON map to the columns defined by the " + tableName + " table");
        }

        return sqlBuilder.toString();
    }

    /**
     * Try to create correct SQL String representation of value.
     *
     * Booleans are normalized to "true"/"false" (or "1"/"0" for numeric columns),
     * date/time/numeric values pass through untouched, and only character types are
     * truncated to the column size.
     */
    protected static String createSqlStringValue(final JsonNode fieldNode, final Integer colSize, final int sqlType) {
        String fieldValue = fieldNode.asText();

        switch (sqlType) {

            // only "true" is considered true, everything else is false
            case Types.BOOLEAN:
                fieldValue = Boolean.valueOf(fieldValue).toString();
                break;

            // Don't truncate numeric types.
            case Types.BIT:
            case Types.TINYINT:
            case Types.SMALLINT:
            case Types.INTEGER:
            case Types.BIGINT:
            case Types.REAL:
            case Types.FLOAT:
            case Types.DOUBLE:
            case Types.DECIMAL:
            case Types.NUMERIC:
                if (fieldNode.isBoolean()) {
                    // Convert boolean to number representation for databases those don't support boolean type.
                    fieldValue = fieldNode.asBoolean() ? "1" : "0";
                }
                break;

            // Don't truncate DATE, TIME and TIMESTAMP types. We assume date and time is already correct in long representation.
            // Specifically, milliseconds since January 1, 1970, 00:00:00 GMT
            // However, for TIMESTAMP, PutSQL accepts optional timestamp format via FlowFile attribute.
            // See PutSQL.setParameter method and NIFI-3430 for detail.
            // Alternatively, user can use JSONTreeReader and PutDatabaseRecord to handle date format more efficiently.
            case Types.DATE:
            case Types.TIME:
            case Types.TIMESTAMP:
                break;

            // Truncate string data types only.
case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: case Types.NCHAR: case Types.NVARCHAR: case Types.LONGNVARCHAR: if (colSize != null && fieldValue.length() > colSize) { fieldValue = fieldValue.substring(0, colSize); } break; } return fieldValue; } private String generateUpdate(final JsonNode rootNode, final Map<String, String> attributes, final String tableName, final String updateKeys, final TableSchema schema, final boolean translateFieldNames, final boolean ignoreUnmappedFields, final boolean failUnmappedColumns, final boolean warningUnmappedColumns, boolean escapeColumnNames, boolean quoteTableName, final String attributePrefix) { final Set<String> updateKeyNames; if (updateKeys == null) { updateKeyNames = schema.getPrimaryKeyColumnNames(); } else { updateKeyNames = new HashSet<>(); for (final String updateKey : updateKeys.split(",")) { updateKeyNames.add(updateKey.trim()); } } if (updateKeyNames.isEmpty()) { throw new ProcessException("Table '" + tableName + "' does not have a Primary Key and no Update Keys were specified"); } final StringBuilder sqlBuilder = new StringBuilder(); int fieldCount = 0; sqlBuilder.append("UPDATE "); if (quoteTableName) { sqlBuilder.append(schema.getQuotedIdentifierString()) .append(tableName) .append(schema.getQuotedIdentifierString()); } else { sqlBuilder.append(tableName); } sqlBuilder.append(" SET "); // Create a Set of all normalized Update Key names, and ensure that there is a field in the JSON // for each of the Update Key fields. final Set<String> normalizedFieldNames = getNormalizedColumnNames(rootNode, translateFieldNames); final Set<String> normalizedUpdateNames = new HashSet<>(); for (final String uk : updateKeyNames) { final String normalizedUK = normalizeColumnName(uk, translateFieldNames); normalizedUpdateNames.add(normalizedUK); if (!normalizedFieldNames.contains(normalizedUK)) { String missingColMessage = "JSON does not have a value for the " + (updateKeys == null ? 
"Primary" : "Update") + "Key column '" + uk + "'"; if (failUnmappedColumns) { getLogger().error(missingColMessage); throw new ProcessException(missingColMessage); } else if (warningUnmappedColumns) { getLogger().warn(missingColMessage); } } } // iterate over all of the elements in the JSON, building the SQL statement by adding the column names, as well as // adding the column value to a "<sql>.args.N.value" attribute and the type of a "<sql>.args.N.type" attribute add the // columns that we are inserting into Iterator<String> fieldNames = rootNode.getFieldNames(); while (fieldNames.hasNext()) { final String fieldName = fieldNames.next(); final String normalizedColName = normalizeColumnName(fieldName, translateFieldNames); final ColumnDescription desc = schema.getColumns().get(normalizedColName); if (desc == null) { if (!ignoreUnmappedFields) { throw new ProcessException("Cannot map JSON field '" + fieldName + "' to any column in the database"); } else { continue; } } // Check if this column is an Update Key. If so, skip it for now. We will come // back to it after we finish the SET clause if (normalizedUpdateNames.contains(normalizedColName)) { continue; } if (fieldCount++ > 0) { sqlBuilder.append(", "); } if(escapeColumnNames){ sqlBuilder.append(schema.getQuotedIdentifierString()) .append(desc.getColumnName()) .append(schema.getQuotedIdentifierString()); } else { sqlBuilder.append(desc.getColumnName()); } sqlBuilder.append(" = ?"); final int sqlType = desc.getDataType(); attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType)); final Integer colSize = desc.getColumnSize(); final JsonNode fieldNode = rootNode.get(fieldName); if (!fieldNode.isNull()) { String fieldValue = createSqlStringValue(fieldNode, colSize, sqlType); attributes.put(attributePrefix + ".args." 
+ fieldCount + ".value", fieldValue); } } // Set the WHERE clause based on the Update Key values sqlBuilder.append(" WHERE "); fieldNames = rootNode.getFieldNames(); int whereFieldCount = 0; while (fieldNames.hasNext()) { final String fieldName = fieldNames.next(); final String normalizedColName = normalizeColumnName(fieldName, translateFieldNames); final ColumnDescription desc = schema.getColumns().get(normalizedColName); if (desc == null) { continue; } // Check if this column is a Update Key. If so, skip it for now. We will come // back to it after we finish the SET clause if (!normalizedUpdateNames.contains(normalizedColName)) { continue; } if (whereFieldCount++ > 0) { sqlBuilder.append(" AND "); } fieldCount++; if(escapeColumnNames){ sqlBuilder.append(schema.getQuotedIdentifierString()) .append(normalizedColName) .append(schema.getQuotedIdentifierString()); } else { sqlBuilder.append(normalizedColName); } sqlBuilder.append(" = ?"); final int sqlType = desc.getDataType(); attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType)); final Integer colSize = desc.getColumnSize(); String fieldValue = rootNode.get(fieldName).asText(); if (colSize != null && fieldValue.length() > colSize) { fieldValue = fieldValue.substring(0, colSize); } attributes.put(attributePrefix + ".args." 
+ fieldCount + ".value", fieldValue); } return sqlBuilder.toString(); } private String generateDelete(final JsonNode rootNode, final Map<String, String> attributes, final String tableName, final TableSchema schema, final boolean translateFieldNames, final boolean ignoreUnmappedFields, final boolean failUnmappedColumns, final boolean warningUnmappedColumns, boolean escapeColumnNames, boolean quoteTableName, final String attributePrefix) { final Set<String> normalizedFieldNames = getNormalizedColumnNames(rootNode, translateFieldNames); for (final String requiredColName : schema.getRequiredColumnNames()) { final String normalizedColName = normalizeColumnName(requiredColName, translateFieldNames); if (!normalizedFieldNames.contains(normalizedColName)) { String missingColMessage = "JSON does not have a value for the Required column '" + requiredColName + "'"; if (failUnmappedColumns) { getLogger().error(missingColMessage); throw new ProcessException(missingColMessage); } else if (warningUnmappedColumns) { getLogger().warn(missingColMessage); } } } final StringBuilder sqlBuilder = new StringBuilder(); int fieldCount = 0; sqlBuilder.append("DELETE FROM "); if (quoteTableName) { sqlBuilder.append(schema.getQuotedIdentifierString()) .append(tableName) .append(schema.getQuotedIdentifierString()); } else { sqlBuilder.append(tableName); } sqlBuilder.append(" WHERE "); // iterate over all of the elements in the JSON, building the SQL statement by adding the column names, as well as // adding the column value to a "<sql>.args.N.value" attribute and the type of a "<sql>.args.N.type" attribute add the // columns that we are inserting into final Iterator<String> fieldNames = rootNode.getFieldNames(); while (fieldNames.hasNext()) { final String fieldName = fieldNames.next(); final ColumnDescription desc = schema.getColumns().get(normalizeColumnName(fieldName, translateFieldNames)); if (desc == null && !ignoreUnmappedFields) { throw new ProcessException("Cannot map JSON field '" + 
fieldName + "' to any column in the database"); } if (desc != null) { if (fieldCount++ > 0) { sqlBuilder.append(" AND "); } if (escapeColumnNames) { sqlBuilder.append(schema.getQuotedIdentifierString()) .append(desc.getColumnName()) .append(schema.getQuotedIdentifierString()); } else { sqlBuilder.append(desc.getColumnName()); } sqlBuilder.append(" = ?"); final int sqlType = desc.getDataType(); attributes.put(attributePrefix + ".args." + fieldCount + ".type", String.valueOf(sqlType)); final Integer colSize = desc.getColumnSize(); final JsonNode fieldNode = rootNode.get(fieldName); if (!fieldNode.isNull()) { String fieldValue = fieldNode.asText(); if (colSize != null && fieldValue.length() > colSize) { fieldValue = fieldValue.substring(0, colSize); } attributes.put(attributePrefix + ".args." + fieldCount + ".value", fieldValue); } } } if (fieldCount == 0) { throw new ProcessException("None of the fields in the JSON map to the columns defined by the " + tableName + " table"); } return sqlBuilder.toString(); } private static String normalizeColumnName(final String colName, final boolean translateColumnNames) { return translateColumnNames ? 
colName.toUpperCase().replace("_", "") : colName; } private static class TableSchema { private List<String> requiredColumnNames; private Set<String> primaryKeyColumnNames; private Map<String, ColumnDescription> columns; private String quotedIdentifierString; private TableSchema(final List<ColumnDescription> columnDescriptions, final boolean translateColumnNames, final Set<String> primaryKeyColumnNames, final String quotedIdentifierString) { this.columns = new HashMap<>(); this.primaryKeyColumnNames = primaryKeyColumnNames; this.quotedIdentifierString = quotedIdentifierString; this.requiredColumnNames = new ArrayList<>(); for (final ColumnDescription desc : columnDescriptions) { columns.put(ConvertJSONToSQL.normalizeColumnName(desc.columnName, translateColumnNames), desc); if (desc.isRequired()) { requiredColumnNames.add(desc.columnName); } } } public Map<String, ColumnDescription> getColumns() { return columns; } public List<String> getRequiredColumnNames() { return requiredColumnNames; } public Set<String> getPrimaryKeyColumnNames() { return primaryKeyColumnNames; } public String getQuotedIdentifierString() { return quotedIdentifierString; } public static TableSchema from(final Connection conn, final String catalog, final String schema, final String tableName, final boolean translateColumnNames, final boolean includePrimaryKeys) throws SQLException { final DatabaseMetaData dmd = conn.getMetaData(); try (final ResultSet colrs = dmd.getColumns(catalog, schema, tableName, "%")) { final List<ColumnDescription> cols = new ArrayList<>(); while (colrs.next()) { final ColumnDescription col = ColumnDescription.from(colrs); cols.add(col); } final Set<String> primaryKeyColumns = new HashSet<>(); if (includePrimaryKeys) { try (final ResultSet pkrs = conn.getMetaData().getPrimaryKeys(catalog, null, tableName)) { while (pkrs.next()) { final String colName = pkrs.getString("COLUMN_NAME"); primaryKeyColumns.add(normalizeColumnName(colName, translateColumnNames)); } } } return new 
TableSchema(cols, translateColumnNames, primaryKeyColumns, dmd.getIdentifierQuoteString());
            }
        }
    }

    /**
     * Metadata for a single table column: name, JDBC type, whether a value is
     * required for INSERT, and the declared column size (null when unknown).
     */
    private static class ColumnDescription {
        private final String columnName;
        private final int dataType;
        private final boolean required;
        private final Integer columnSize;

        private ColumnDescription(final String columnName, final int dataType, final boolean required, final Integer columnSize) {
            this.columnName = columnName;
            this.dataType = dataType;
            this.required = required;
            this.columnSize = columnSize;
        }

        public int getDataType() {
            return dataType;
        }

        public Integer getColumnSize() {
            return columnSize;
        }

        public String getColumnName() {
            return columnName;
        }

        public boolean isRequired() {
            return required;
        }

        /**
         * Builds a ColumnDescription from the current row of a DatabaseMetaData.getColumns()
         * result set. A column is "required" when it is NOT NULL, not auto-increment, and
         * has no default value.
         */
        public static ColumnDescription from(final ResultSet resultSet) throws SQLException {
            final ResultSetMetaData md = resultSet.getMetaData();
            List<String> columns = new ArrayList<>();

            for (int i = 1; i < md.getColumnCount() + 1; i++) {
                columns.add(md.getColumnName(i));
            }

            // COLUMN_DEF must be read first to work around Oracle bug, see NIFI-4279 for details
            final String defaultValue = resultSet.getString("COLUMN_DEF");
            final String columnName = resultSet.getString("COLUMN_NAME");
            final int dataType = resultSet.getInt("DATA_TYPE");
            final int colSize = resultSet.getInt("COLUMN_SIZE");

            final String nullableValue = resultSet.getString("IS_NULLABLE");
            final boolean isNullable = "YES".equalsIgnoreCase(nullableValue) || nullableValue.isEmpty();

            // IS_AUTOINCREMENT is optional in JDBC metadata; probe for it before reading.
            String autoIncrementValue = "NO";

            if (columns.contains("IS_AUTOINCREMENT")) {
                autoIncrementValue = resultSet.getString("IS_AUTOINCREMENT");
            }

            final boolean isAutoIncrement = "YES".equalsIgnoreCase(autoIncrementValue);
            final boolean required = !isNullable && !isAutoIncrement && defaultValue == null;
            // A reported size of 0 is treated as "unknown" and stored as null.
            return new ColumnDescription(columnName, dataType, required, colSize == 0 ?
                    null : colSize);
        }
    }

    /**
     * Cache key for schemaCache: identifies a table by (catalog, tableName).
     * Implements equals/hashCode over both fields so it can be used as a map key.
     */
    private static class SchemaKey {
        private final String catalog;
        private final String tableName;

        public SchemaKey(final String catalog, final String tableName) {
            this.catalog = catalog;
            this.tableName = tableName;
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((catalog == null) ? 0 : catalog.hashCode());
            result = prime * result + ((tableName == null) ? 0 : tableName.hashCode());
            return result;
        }

        @Override
        public boolean equals(final Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }

            final SchemaKey other = (SchemaKey) obj;
            if (catalog == null) {
                if (other.catalog != null) {
                    return false;
                }
            } else if (!catalog.equals(other.catalog)) {
                return false;
            }

            if (tableName == null) {
                if (other.tableName != null) {
                    return false;
                }
            } else if (!tableName.equals(other.tableName)) {
                return false;
            }

            return true;
        }
    }
}
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2010 psiinon@gmail.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.brk; import java.awt.Component; import java.awt.Event; import java.awt.EventQueue; import java.awt.Toolkit; import java.awt.event.KeyEvent; import java.net.MalformedURLException; import java.net.URL; import java.security.InvalidParameterException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.swing.JList; import javax.swing.JTree; import javax.swing.KeyStroke; import javax.swing.tree.TreePath; import org.apache.log4j.Logger; import org.parosproxy.paros.Constant; import org.parosproxy.paros.control.Control; import org.parosproxy.paros.control.Control.Mode; import org.parosproxy.paros.extension.ExtensionAdaptor; import org.parosproxy.paros.extension.ExtensionHook; import org.parosproxy.paros.extension.ExtensionHookView; import org.parosproxy.paros.extension.OptionsChangedListener; import org.parosproxy.paros.extension.SessionChangedListener; import org.parosproxy.paros.model.HistoryReference; import org.parosproxy.paros.model.OptionsParam; import org.parosproxy.paros.model.Session; import org.parosproxy.paros.model.SiteNode; import org.parosproxy.paros.view.View; import org.zaproxy.zap.extension.api.API; import 
org.zaproxy.zap.extension.brk.impl.http.HttpBreakpointMessage;
import org.zaproxy.zap.extension.brk.impl.http.HttpBreakpointMessage.Location;
import org.zaproxy.zap.extension.brk.impl.http.HttpBreakpointMessage.Match;
import org.zaproxy.zap.extension.brk.impl.http.HttpBreakpointsUiManagerInterface;
import org.zaproxy.zap.extension.brk.impl.http.ProxyListenerBreak;
import org.zaproxy.zap.extension.help.ExtensionHelp;
import org.zaproxy.zap.extension.httppanel.Message;
import org.zaproxy.zap.view.ZapMenuItem;

/**
 * ZAP extension that lets the user set breakpoints on HTTP messages, intercept
 * them in the Break panel, and step/continue/drop them from the Tools menu.
 */
public class ExtensionBreak extends ExtensionAdaptor implements SessionChangedListener, OptionsChangedListener {

    // Which breakpoint dialog (if any) is currently showing.
    public enum DialogType {NONE, ADD, EDIT, REMOVE};

    public static final String NAME = "ExtensionBreak";

    private static final Logger logger = Logger.getLogger(ExtensionBreak.class);

    private BreakPanel breakPanel = null;
    private ProxyListenerBreak proxyListener = null;

    private BreakpointsPanel breakpointsPanel = null;

    private PopupMenuEditBreak popupMenuEditBreak = null;
    private PopupMenuRemove popupMenuRemove = null;

    private BreakpointMessageHandler breakpointMessageHandler;

    private DialogType currentDialogType = DialogType.NONE;

    // UI managers indexed by breakpoint class and by message class respectively;
    // the message-class map is walked up the superclass chain in getBreakpointUiManager().
    private Map<Class<? extends BreakpointMessageInterface>, BreakpointsUiManagerInterface> mapBreakpointUiManager;

    private Map<Class<? extends Message>, BreakpointsUiManagerInterface> mapMessageUiManager;

    private Mode mode = Control.getSingleton().getMode();

    private BreakpointsParam breakpointsParams;

    private BreakpointsOptionsPanel breakpointsOptionsPanel;

    private HttpBreakpointsUiManagerInterface httpBreakpoints;

    private ZapMenuItem menuBreakOnRequests = null;
    private ZapMenuItem menuBreakOnResponses = null;
    private ZapMenuItem menuStep = null;
    private ZapMenuItem menuContinue = null;
    private ZapMenuItem menuDrop = null;
    private ZapMenuItem menuHttpBreakpoint = null;

    private BreakAPI api = new BreakAPI(this);

    public ExtensionBreak() {
        super();
        initialize();
    }

    public ExtensionBreak(String name) {
        super(name);
    }

    private void initialize() {
        this.setName(NAME);
        this.setOrder(24);
    }

    /** Lazily creates the Break work panel (UI mode only). */
    public BreakPanel getBreakPanel() {
        if (breakPanel == null) {
            breakPanel = new BreakPanel(this, getOptionsParam());
            breakPanel.setName(Constant.messages.getString("tab.break"));
        }
        return breakPanel;
    }

    /**
     * Registers panels, popup/tool menu items, the proxy listener, the API, and
     * session/options listeners. UI pieces are only wired when a view exists.
     */
    @SuppressWarnings("deprecation")
    @Override
    public void hook(ExtensionHook extensionHook) {
        super.hook(extensionHook);
        extensionHook.addOptionsParamSet(getOptionsParam());

        if (getView() != null) {
            breakpointMessageHandler = new BreakpointMessageHandler(getBreakPanel());
            breakpointMessageHandler.setEnabledBreakpoints(getBreakpointsModel().getBreakpointsEnabledList());

            ExtensionHookView pv = extensionHook.getHookView();
            pv.addWorkPanel(getBreakPanel());
            pv.addOptionPanel(getOptionsPanel());

            extensionHook.getHookMenu().addAnalyseMenuItem(extensionHook.getHookMenu().getMenuSeparator());

            extensionHook.getHookView().addStatusPanel(getBreakpointsPanel());

            extensionHook.getHookMenu().addPopupMenuItem(getPopupMenuEdit());
            extensionHook.getHookMenu().addPopupMenuItem(getPopupMenuDelete());

            mapBreakpointUiManager = new HashMap<>();
            mapMessageUiManager = new HashMap<>();

            httpBreakpoints = new HttpBreakpointsUiManagerInterface(extensionHook.getHookMenu(), this);

            addBreakpointsUiManager(httpBreakpoints);

            extensionHook.getHookMenu().addToolsMenuItem(getMenuToggleBreakOnRequests());
            extensionHook.getHookMenu().addToolsMenuItem(getMenuToggleBreakOnResponses());
            extensionHook.getHookMenu().addToolsMenuItem(getMenuStep());
            extensionHook.getHookMenu().addToolsMenuItem(getMenuContinue());
            extensionHook.getHookMenu().addToolsMenuItem(getMenuDrop());
            extensionHook.getHookMenu().addToolsMenuItem(getMenuAddHttpBreakpoint());

            extensionHook.addProxyListener(getProxyListenerBreak());

            // APIs are usually loaded even if the view is null, as they are specifically for daemon mode
            // However in this case the API isnt really of any use unless the UI is available
            API.getInstance().registerApiImplementor(api);

            extensionHook.addSessionListener(this);
            extensionHook.addOptionsChangedListener(this);

            ExtensionHelp.enableHelpKey(getBreakPanel(), "ui.tabs.break");
            ExtensionHelp.enableHelpKey(getBreakpointsPanel(), "ui.tabs.breakpoints");
        }
    }

    private BreakpointsParam getOptionsParam() {
        if (breakpointsParams == null) {
            breakpointsParams = new BreakpointsParam();
        }
        return breakpointsParams;
    }

    private BreakpointsOptionsPanel getOptionsPanel() {
        if (breakpointsOptionsPanel == null) {
            breakpointsOptionsPanel = new BreakpointsOptionsPanel();
        }
        return breakpointsOptionsPanel;
    }

    private BreakpointsPanel getBreakpointsPanel() {
        if (breakpointsPanel == null) {
            breakpointsPanel = new BreakpointsPanel(this);
        }
        return breakpointsPanel;
    }

    /** Adds a breakpoint to the Breakpoints panel and focuses the panel. */
    public void addBreakpoint(BreakpointMessageInterface breakpoint) {
        this.getBreakpointsPanel().addBreakpoint(breakpoint);
        // Switch to the panel for some visual feedback
        this.getBreakpointsPanel().setTabFocus();
    }

    public void editBreakpoint(BreakpointMessageInterface oldBreakpoint, BreakpointMessageInterface newBreakpoint) {
        this.getBreakpointsPanel().editBreakpoint(oldBreakpoint, newBreakpoint);
    }

    public void removeBreakpoint(BreakpointMessageInterface breakpoint) {
        this.getBreakpointsPanel().removeBreakpoint(breakpoint);
    }

    public List<BreakpointMessageInterface> getBreakpointsList() {
        return getBreakpointsModel().getBreakpointsList();
    }

    public BreakpointMessageInterface getUiSelectedBreakpoint() {
        return getBreakpointsPanel().getSelectedBreakpoint();
    }

    /** Registers a UI manager under both its breakpoint class and its message class. */
    public void addBreakpointsUiManager(BreakpointsUiManagerInterface uiManager) {
        mapBreakpointUiManager.put(uiManager.getBreakpointClass(), uiManager);
        mapMessageUiManager.put(uiManager.getMessageClass(), uiManager);
    }

    public void removeBreakpointsUiManager(BreakpointsUiManagerInterface uiManager) {
        mapBreakpointUiManager.remove(uiManager.getBreakpointClass());
        mapMessageUiManager.remove(uiManager.getMessageClass());
    }

    public void setBreakAllRequests(boolean brk) {
        this.getBreakPanel().setBreakAllRequests(brk);
    }

    public void setBreakAllResponses(boolean brk) {
        this.getBreakPanel().setBreakAllResponses(brk);
    }

    /**
     * Adds an HTTP breakpoint from string parameters; location/match must be valid
     * names of the Location/Match enums or an InvalidParameterException is thrown.
     */
    public void addHttpBreakpoint(String string, String location, String match, boolean inverse, boolean ignoreCase) {
        Location loc;
        Match mtch;
        try {
            loc = Location.valueOf(location);
        } catch (Exception e) {
            throw new InvalidParameterException("location must be one of " + Arrays.toString(Location.values()));
        }
        try {
            mtch = Match.valueOf(match);
        } catch (Exception e) {
            throw new InvalidParameterException("match must be one of " + Arrays.toString(Match.values()));
        }
        this.addBreakpoint(new HttpBreakpointMessage(string, loc, mtch, inverse, ignoreCase));
    }

    /** Removes an HTTP breakpoint; same parameter validation as addHttpBreakpoint. */
    public void removeHttpBreakpoint(String string, String location, String match, boolean inverse, boolean ignoreCase) {
        Location loc;
        Match mtch;
        try {
            loc = Location.valueOf(location);
        } catch (Exception e) {
            throw new InvalidParameterException("location must be one of " + Arrays.toString(Location.values()));
        }
        try {
            mtch = Match.valueOf(match);
        } catch (Exception e) {
            throw new InvalidParameterException("match must be one of " + Arrays.toString(Match.values()));
        }
        this.removeBreakpoint(new HttpBreakpointMessage(string, loc, mtch, inverse, ignoreCase));
    }

    /** Opens the add-breakpoint dialog appropriate for the given message's type. */
    public void addUiBreakpoint(Message aMessage) {
        BreakpointsUiManagerInterface uiManager = getBreakpointUiManager(aMessage.getClass());
        if (uiManager != null) {
            uiManager.handleAddBreakpoint(aMessage);
        }
    }

    // Resolves a UI manager for the message class, recursing up the superclass chain
    // until a registered class is found or the chain leaves the Message hierarchy.
    private BreakpointsUiManagerInterface getBreakpointUiManager(Class<?> clazz) {
        if (!Message.class.isAssignableFrom(clazz)) {
            return null;
        }

        BreakpointsUiManagerInterface uiManager = mapMessageUiManager.get(clazz);
        if (uiManager == null) {
            uiManager = getBreakpointUiManager(clazz.getSuperclass());
        }

        return uiManager;
    }

    /** Opens the edit dialog for the breakpoint currently selected in the panel, if any. */
    public void editUiSelectedBreakpoint() {
        BreakpointMessageInterface breakpoint = getBreakpointsPanel().getSelectedBreakpoint();
        if (breakpoint != null) {
            BreakpointsUiManagerInterface uiManager = mapBreakpointUiManager.get(breakpoint.getClass());
            if (uiManager != null) {
                uiManager.handleEditBreakpoint(breakpoint);
            }
        }
    }

    /** Opens the remove dialog for the breakpoint currently selected in the panel, if any. */
    public void removeUiSelectedBreakpoint() {
        BreakpointMessageInterface breakpoint = getBreakpointsPanel().getSelectedBreakpoint();
        if (breakpoint != null) {
            BreakpointsUiManagerInterface uiManager = mapBreakpointUiManager.get(breakpoint.getClass());
            if (uiManager != null) {
                uiManager.handleRemoveBreakpoint(breakpoint);
            }
        }
    }

    private BreakpointsTableModel getBreakpointsModel() {
        return (BreakpointsTableModel)this.getBreakpointsPanel().getBreakpoints().getModel();
    }

    private ProxyListenerBreak getProxyListenerBreak() {
        if (proxyListener == null) {
            proxyListener = new ProxyListenerBreak(getModel(), this);
        }
        return proxyListener;
    }

    private PopupMenuEditBreak getPopupMenuEdit() {
        if (popupMenuEditBreak == null) {
            popupMenuEditBreak = new PopupMenuEditBreak();
            popupMenuEditBreak.setExtension(this);
        }
        return popupMenuEditBreak;
    }

    private PopupMenuRemove getPopupMenuDelete() {
        if (popupMenuRemove == null) {
            popupMenuRemove = new PopupMenuRemove();
            popupMenuRemove.setExtension(this);
        }
        return popupMenuRemove;
    }

    private ZapMenuItem getMenuToggleBreakOnRequests() {
        if (menuBreakOnRequests == null) {
            menuBreakOnRequests = new ZapMenuItem("menu.tools.brk.req",
KeyStroke.getKeyStroke(KeyEvent.VK_B, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask(), false)); menuBreakOnRequests.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { if (getOptionsParam().getButtonMode() == BreakpointsParam.BUTTON_MODE_SIMPLE) { // Single button mode - toggle break on all getBreakPanel().setBreakAll(! getBreakPanel().isBreakAll()); } else { // Toggle break on requests getBreakPanel().setBreakAllRequests(! getBreakPanel().isBreakRequest()); } } }); } return menuBreakOnRequests; } private ZapMenuItem getMenuToggleBreakOnResponses() { if (menuBreakOnResponses == null) { menuBreakOnResponses = new ZapMenuItem("menu.tools.brk.resp", KeyStroke.getKeyStroke(KeyEvent.VK_B, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask() | Event.ALT_MASK, false)); menuBreakOnResponses.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { if (getOptionsParam().getButtonMode() == BreakpointsParam.BUTTON_MODE_SIMPLE) { // Single button mode - toggle break on all getBreakPanel().setBreakAll(! getBreakPanel().isBreakAll()); } else { // Toggle break on Responses getBreakPanel().setBreakAllResponses(! 
getBreakPanel().isBreakResponse()); } } }); } return menuBreakOnResponses; } private ZapMenuItem getMenuStep() { if (menuStep == null) { menuStep = new ZapMenuItem("menu.tools.brk.step", KeyStroke.getKeyStroke(KeyEvent.VK_S, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask(), false)); menuStep.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { if (getBreakPanel().isHoldMessage()) { // Menu currently always enabled, but dont do anything unless a message is being held getBreakPanel().step(); } } }); } return menuStep; } private ZapMenuItem getMenuContinue() { if (menuContinue == null) { menuContinue = new ZapMenuItem("menu.tools.brk.cont", KeyStroke.getKeyStroke(KeyEvent.VK_C, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask(), false)); menuContinue.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { if (getBreakPanel().isHoldMessage()) { // Menu currently always enabled, but dont do anything unless a message is being held getBreakPanel().cont(); } } }); } return menuContinue; } private ZapMenuItem getMenuDrop() { if (menuDrop == null) { menuDrop = new ZapMenuItem("menu.tools.brk.drop", KeyStroke.getKeyStroke(KeyEvent.VK_X, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask(), false)); menuDrop.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { if (getBreakPanel().isHoldMessage()) { // Menu currently always enabled, but dont do anything unless a message is being held getBreakPanel().drop(); } } }); } return menuDrop; } private ZapMenuItem getMenuAddHttpBreakpoint() { if (menuHttpBreakpoint == null) { menuHttpBreakpoint = new ZapMenuItem("menu.tools.brk.custom", KeyStroke.getKeyStroke(KeyEvent.VK_A, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask(), false)); menuHttpBreakpoint.addActionListener(new java.awt.event.ActionListener() { @Override 
public void actionPerformed(java.awt.event.ActionEvent e) { // Check to see if anything is selected in the main tabs String url = ""; Component c = View.getSingleton().getMainFrame().getFocusOwner(); if (c != null) { if (c instanceof JList) { // Handles the history list and similar @SuppressWarnings("rawtypes") Object sel = ((JList)c).getSelectedValue(); try { if (sel != null && sel instanceof HistoryReference && ((HistoryReference)sel).getURI() != null) { url = ((HistoryReference)sel).getURI().toString(); } } catch (Exception e1) { // Ignore } } else if (c instanceof JTree) { // Handles the Sites tree TreePath path = ((JTree)c).getSelectionPath(); try { if (path != null && path.getLastPathComponent() instanceof SiteNode) { url = ((SiteNode)path.getLastPathComponent()).getHistoryReference().getURI().toString(); } } catch (Exception e1) { // Ignore } } } httpBreakpoints.handleAddBreakpoint(url); } }); } return menuHttpBreakpoint; } public boolean canAddBreakpoint() { return (currentDialogType == DialogType.NONE || currentDialogType == DialogType.ADD); } public boolean canEditBreakpoint() { return (currentDialogType == DialogType.NONE || currentDialogType == DialogType.EDIT); } public boolean canRemoveBreakpoint() { return (currentDialogType == DialogType.NONE || currentDialogType == DialogType.REMOVE); } public void dialogShown(DialogType type) { currentDialogType = type; } public void dialogClosed() { currentDialogType = DialogType.NONE; } @Override public String getAuthor() { return Constant.ZAP_TEAM; } @Override public String getDescription() { return Constant.messages.getString("brk.desc"); } @Override public URL getURL() { try { return new URL(Constant.ZAP_HOMEPAGE); } catch (MalformedURLException e) { return null; } } @Override public void sessionAboutToChange(final Session session) { if (EventQueue.isDispatchThread()) { sessionAboutToChange(); } else { try { EventQueue.invokeAndWait(new Runnable() { @Override public void run() { sessionAboutToChange(); } }); 
} catch (Exception e) { logger.error(e.getMessage(), e); } } } @Override public void sessionChanged(Session session) { if (getView() == null) { return; } getBreakPanel().init(); } private void sessionAboutToChange() { if (getView() == null) { return; } getBreakPanel().reset(); } @Override public void sessionScopeChanged(Session session) { } @Override public void destroy() { if (breakPanel != null) { breakPanel.savePanels(); } } public boolean messageReceivedFromClient(Message aMessage) { if (mode.equals(Mode.safe)) { return true; } return breakpointMessageHandler.handleMessageReceivedFromClient(aMessage, mode.equals(Mode.protect)); } public boolean messageReceivedFromServer(Message aMessage) { if (mode.equals(Mode.safe)) { return true; } return breakpointMessageHandler.handleMessageReceivedFromServer(aMessage, mode.equals(Mode.protect)); } /** * Exposes list of enabled breakpoints. * * @return list of enabled breakpoints */ public List<BreakpointMessageInterface> getBreakpointsEnabledList() { if (mode.equals(Mode.safe)) { return new ArrayList<>(); } return getBreakpointsModel().getBreakpointsEnabledList(); } @Override public void sessionModeChanged(Mode mode) { this.mode = mode; if (getView() == null) { return; } this.getBreakPanel().sessionModeChanged(mode); } public void setBreakOnId(String id, boolean enable) { logger.debug("setBreakOnId " + id + " " + enable); if (enable) { breakpointMessageHandler.getEnabledKeyBreakpoints().add(id); } else { breakpointMessageHandler.getEnabledKeyBreakpoints().remove(id); } } @Override public void optionsChanged(OptionsParam optionsParam) { if (View.isInitialised()) { this.getBreakPanel().setButtonMode( optionsParam.getParamSet(BreakpointsParam.class).getButtonMode()); } } @Override public void optionsLoaded() { if (View.isInitialised()) { this.getBreakPanel().setButtonMode(this.getOptionsParam().getButtonMode()); } } @Override /** * No database tables used, so all supported */ public boolean supportsDb(String type) { return 
true; } }
/* * Copyright 1994-2004 Sun Microsystems, Inc. All Rights Reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Sun designates this * particular file as subject to the "Classpath" exception as provided * by Sun in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, * CA 95054 USA or visit www.sun.com if you need additional information or * have any questions. */ package java.util; import java.lang.*; /** * The string tokenizer class allows an application to break a * string into tokens. The tokenization method is much simpler than * the one used by the <code>StreamTokenizer</code> class. The * <code>StringTokenizer</code> methods do not distinguish among * identifiers, numbers, and quoted strings, nor do they recognize * and skip comments. * <p> * The set of delimiters (the characters that separate tokens) may * be specified either at creation time or on a per-token basis. 
* <p> * An instance of <code>StringTokenizer</code> behaves in one of two * ways, depending on whether it was created with the * <code>returnDelims</code> flag having the value <code>true</code> * or <code>false</code>: * <ul> * <li>If the flag is <code>false</code>, delimiter characters serve to * separate tokens. A token is a maximal sequence of consecutive * characters that are not delimiters. * <li>If the flag is <code>true</code>, delimiter characters are themselves * considered to be tokens. A token is thus either one delimiter * character, or a maximal sequence of consecutive characters that are * not delimiters. * </ul><p> * A <tt>StringTokenizer</tt> object internally maintains a current * position within the string to be tokenized. Some operations advance this * current position past the characters processed.<p> * A token is returned by taking a substring of the string that was used to * create the <tt>StringTokenizer</tt> object. * <p> * The following is one example of the use of the tokenizer. The code: * <blockquote><pre> * StringTokenizer st = new StringTokenizer("this is a test"); * while (st.hasMoreTokens()) { * System.out.println(st.nextToken()); * } * </pre></blockquote> * <p> * prints the following output: * <blockquote><pre> * this * is * a * test * </pre></blockquote> * * <p> * <tt>StringTokenizer</tt> is a legacy class that is retained for * compatibility reasons although its use is discouraged in new code. It is * recommended that anyone seeking this functionality use the <tt>split</tt> * method of <tt>String</tt> or the java.util.regex package instead. 
 * <p>
 * The following example illustrates how the <tt>String.split</tt>
 * method can be used to break up a string into its basic tokens:
 * <blockquote><pre>
 *     String[] result = "this is a test".split("\\s");
 *     for (int x=0; x&lt;result.length; x++)
 *         System.out.println(result[x]);
 * </pre></blockquote>
 * <p>
 * prints the following output:
 * <blockquote><pre>
 *     this
 *     is
 *     a
 *     test
 * </pre></blockquote>
 *
 * @author  unascribed
 * @see     java.io.StreamTokenizer
 * @since   JDK1.0
 */
public
class StringTokenizer implements Enumeration<Object> {
    private int currentPosition;   // index of the next character to examine
    private int newPosition;       // position precomputed by hasMoreTokens(); -1 if stale
    private int maxPosition;       // length of str (exclusive upper bound for scanning)
    private String str;            // the string being tokenized
    private String delimiters;     // current delimiter set (may be swapped by nextToken(String))
    private boolean retDelims;     // if true, delimiter characters are returned as tokens
    private boolean delimsChanged; // set when delimiters change between hasMoreTokens/nextToken

    /**
     * maxDelimCodePoint stores the value of the delimiter character with the
     * highest value. It is used to optimize the detection of delimiter
     * characters.
     *
     * It is unlikely to provide any optimization benefit in the
     * hasSurrogates case because most string characters will be
     * smaller than the limit, but we keep it so that the two code
     * paths remain similar.
     */
    private int maxDelimCodePoint;

    /**
     * If delimiters include any surrogates (including surrogate
     * pairs), hasSurrogates is true and the tokenizer uses the
     * different code path. This is because String.indexOf(int)
     * doesn't handle unpaired surrogates as a single character.
     */
    private boolean hasSurrogates = false;

    /**
     * When hasSurrogates is true, delimiters are converted to code
     * points and isDelimiter(int) is used to determine if the given
     * codepoint is a delimiter.
     */
    private int[] delimiterCodePoints;

    /**
     * Set maxDelimCodePoint to the highest char in the delimiter set.
     */
    private void setMaxDelimCodePoint() {
        if (delimiters == null) {
            maxDelimCodePoint = 0;
            return;
        }

        int m = 0;
        int c;
        int count = 0;
        // Walk the delimiter string by code point; Character.charCount(c) is 2 for
        // supplementary code points so surrogate pairs are consumed as one unit.
        for (int i = 0; i < delimiters.length(); i += Character.charCount(c)) {
            c = delimiters.charAt(i);
            if (c >= Character.MIN_HIGH_SURROGATE && c <= Character.MAX_LOW_SURROGATE) {
                c = delimiters.codePointAt(i);
                hasSurrogates = true;
            }
            if (m < c) {
                m = c;
            }
            count++;
        }
        maxDelimCodePoint = m;

        // Surrogate path: materialize the delimiter code points once so that
        // isDelimiter(int) can do a simple linear scan.
        if (hasSurrogates) {
            delimiterCodePoints = new int[count];
            for (int i = 0, j = 0; i < count; i++, j += Character.charCount(c)) {
                c = delimiters.codePointAt(j);
                delimiterCodePoints[i] = c;
            }
        }
    }

    /**
     * Constructs a string tokenizer for the specified string. All
     * characters in the <code>delim</code> argument are the delimiters
     * for separating tokens.
     * <p>
     * If the <code>returnDelims</code> flag is <code>true</code>, then
     * the delimiter characters are also returned as tokens. Each
     * delimiter is returned as a string of length one. If the flag is
     * <code>false</code>, the delimiter characters are skipped and only
     * serve as separators between tokens.
     * <p>
     * Note that if <tt>delim</tt> is <tt>null</tt>, this constructor does
     * not throw an exception. However, trying to invoke other methods on the
     * resulting <tt>StringTokenizer</tt> may result in a
     * <tt>NullPointerException</tt>.
     *
     * @param   str            a string to be parsed.
     * @param   delim          the delimiters.
     * @param   returnDelims   flag indicating whether to return the delimiters
     *                         as tokens.
     * @exception NullPointerException if str is <CODE>null</CODE>
     */
    public StringTokenizer(String str, String delim, boolean returnDelims) {
        currentPosition = 0;
        newPosition = -1;
        delimsChanged = false;
        this.str = str;
        maxPosition = str.length();
        delimiters = delim;
        retDelims = returnDelims;
        setMaxDelimCodePoint();
    }

    /**
     * Constructs a string tokenizer for the specified string. The
     * characters in the <code>delim</code> argument are the delimiters
     * for separating tokens. Delimiter characters themselves will not
     * be treated as tokens.
     * <p>
     * Note that if <tt>delim</tt> is <tt>null</tt>, this constructor does
     * not throw an exception. However, trying to invoke other methods on the
     * resulting <tt>StringTokenizer</tt> may result in a
     * <tt>NullPointerException</tt>.
     *
     * @param   str     a string to be parsed.
     * @param   delim   the delimiters.
     * @exception NullPointerException if str is <CODE>null</CODE>
     */
    public StringTokenizer(String str, String delim) {
        this(str, delim, false);
    }

    /**
     * Constructs a string tokenizer for the specified string. The
     * tokenizer uses the default delimiter set, which is
     * <code>"&nbsp;&#92;t&#92;n&#92;r&#92;f"</code>: the space character,
     * the tab character, the newline character, the carriage-return character,
     * and the form-feed character. Delimiter characters themselves will
     * not be treated as tokens.
     *
     * @param   str   a string to be parsed.
     * @exception NullPointerException if str is <CODE>null</CODE>
     */
    public StringTokenizer(String str) {
        this(str, " \t\n\r\f", false);
    }

    /**
     * Skips delimiters starting from the specified position. If retDelims
     * is false, returns the index of the first non-delimiter character at or
     * after startPos. If retDelims is true, startPos is returned.
     */
    private int skipDelimiters(int startPos) {
        if (delimiters == null) {
            throw new NullPointerException();
        }

        int position = startPos;
        while (!retDelims && position < maxPosition) {
            if (!hasSurrogates) {
                // Fast path: a char above maxDelimCodePoint can never be a delimiter,
                // so the indexOf lookup is skipped entirely for most characters.
                char c = str.charAt(position);
                if ((c > maxDelimCodePoint) || (delimiters.indexOf(c) < 0)) {
                    break;
                }
                position++;
            } else {
                int c = str.codePointAt(position);
                if ((c > maxDelimCodePoint) || !isDelimiter(c)) {
                    break;
                }
                position += Character.charCount(c);
            }
        }
        return position;
    }

    /**
     * Skips ahead from startPos and returns the index of the next delimiter
     * character encountered, or maxPosition if no such delimiter is found.
     */
    private int scanToken(int startPos) {
        int position = startPos;
        while (position < maxPosition) {
            if (!hasSurrogates) {
                char c = str.charAt(position);
                if ((c <= maxDelimCodePoint) && (delimiters.indexOf(c) >= 0)) {
                    break;
                }
                position++;
            } else {
                int c = str.codePointAt(position);
                if ((c <= maxDelimCodePoint) && isDelimiter(c)) {
                    break;
                }
                position += Character.charCount(c);
            }
        }
        // When delimiters are returned as tokens and we are sitting on one,
        // consume exactly that one delimiter so it forms a token by itself.
        if (retDelims && (startPos == position)) {
            if (!hasSurrogates) {
                char c = str.charAt(position);
                if ((c <= maxDelimCodePoint) && (delimiters.indexOf(c) >= 0)) {
                    position++;
                }
            } else {
                int c = str.codePointAt(position);
                if ((c <= maxDelimCodePoint) && isDelimiter(c)) {
                    position += Character.charCount(c);
                }
            }
        }
        return position;
    }

    // Linear membership test over the precomputed delimiter code points
    // (only used on the surrogate path).
    private boolean isDelimiter(int codePoint) {
        for (int i = 0; i < delimiterCodePoints.length; i++) {
            if (delimiterCodePoints[i] == codePoint) {
                return true;
            }
        }
        return false;
    }

    /**
     * Tests if there are more tokens available from this tokenizer's string.
     * If this method returns <tt>true</tt>, then a subsequent call to
     * <tt>nextToken</tt> with no argument will successfully return a token.
     *
     * @return  <code>true</code> if and only if there is at least one token
     *          in the string after the current position; <code>false</code>
     *          otherwise.
     */
    public boolean hasMoreTokens() {
        /*
         * Temporarily store this position and use it in the following
         * nextToken() method only if the delimiters haven't been changed in
         * that nextToken() invocation.
         */
        newPosition = skipDelimiters(currentPosition);
        return (newPosition < maxPosition);
    }

    /**
     * Returns the next token from this string tokenizer.
     *
     * @return     the next token from this string tokenizer.
     * @exception  NoSuchElementException  if there are no more tokens in this
     *               tokenizer's string.
     */
    public String nextToken() {
        /*
         * If next position already computed in hasMoreElements() and
         * delimiters have changed between the computation and this invocation,
         * then use the computed value.
         */
        currentPosition = (newPosition >= 0 && !delimsChanged) ?
            newPosition : skipDelimiters(currentPosition);

        /* Reset these anyway */
        delimsChanged = false;
        newPosition = -1;

        if (currentPosition >= maxPosition) {
            throw new NoSuchElementException();
        }
        int start = currentPosition;
        currentPosition = scanToken(currentPosition);
        return str.substring(start, currentPosition);
    }

    /**
     * Returns the next token in this string tokenizer's string. First,
     * the set of characters considered to be delimiters by this
     * <tt>StringTokenizer</tt> object is changed to be the characters in
     * the string <tt>delim</tt>. Then the next token in the string
     * after the current position is returned. The current position is
     * advanced beyond the recognized token.  The new delimiter set
     * remains the default after this call.
     *
     * @param      delim   the new delimiters.
     * @return     the next token, after switching to the new delimiter set.
     * @exception  NoSuchElementException  if there are no more tokens in this
     *               tokenizer's string.
     * @exception NullPointerException if delim is <CODE>null</CODE>
     */
    public String nextToken(String delim) {
        delimiters = delim;

        /* delimiter string specified, so set the appropriate flag. */
        delimsChanged = true;

        setMaxDelimCodePoint();
        return nextToken();
    }

    /**
     * Returns the same value as the <code>hasMoreTokens</code>
     * method. It exists so that this class can implement the
     * <code>Enumeration</code> interface.
     *
     * @return  <code>true</code> if there are more tokens;
     *          <code>false</code> otherwise.
     * @see     java.util.Enumeration
     * @see     java.util.StringTokenizer#hasMoreTokens()
     */
    public boolean hasMoreElements() {
        return hasMoreTokens();
    }

    /**
     * Returns the same value as the <code>nextToken</code> method,
     * except that its declared return value is <code>Object</code> rather than
     * <code>String</code>. It exists so that this class can implement the
     * <code>Enumeration</code> interface.
     *
     * @return     the next token in the string.
     * @exception  NoSuchElementException  if there are no more tokens in this
     *               tokenizer's string.
     * @see        java.util.Enumeration
     * @see        java.util.StringTokenizer#nextToken()
     */
    public Object nextElement() {
        return nextToken();
    }

    /**
     * Calculates the number of times that this tokenizer's
     * <code>nextToken</code> method can be called before it generates an
     * exception. The current position is not advanced.
     *
     * @return  the number of tokens remaining in the string using the current
     *          delimiter set.
     * @see     java.util.StringTokenizer#nextToken()
     */
    public int countTokens() {
        int count = 0;
        int currpos = currentPosition;
        while (currpos < maxPosition) {
            currpos = skipDelimiters(currpos);
            if (currpos >= maxPosition)
                break;
            currpos = scanToken(currpos);
            count++;
        }
        return count;
    }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.profile.codeInspection.ui.inspectionsTree; import com.intellij.codeHighlighting.HighlightDisplayLevel; import com.intellij.codeInsight.daemon.HighlightDisplayKey; import com.intellij.codeInspection.ex.InspectionProfileImpl; import com.intellij.codeInspection.ex.ScopeToolState; import com.intellij.ide.IdeTooltip; import com.intellij.ide.IdeTooltipManager; import com.intellij.lang.annotation.HighlightSeverity; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.text.StringUtil; import com.intellij.profile.codeInspection.ui.InspectionsAggregationUtil; import com.intellij.profile.codeInspection.ui.SingleInspectionProfilePanel; import com.intellij.profile.codeInspection.ui.table.ScopesAndSeveritiesTable; import com.intellij.profile.codeInspection.ui.table.ThreeStateCheckBoxRenderer; import com.intellij.ui.DoubleClickListener; import com.intellij.ui.treeStructure.treetable.TreeTable; import com.intellij.ui.treeStructure.treetable.TreeTableModel; import com.intellij.ui.treeStructure.treetable.TreeTableTree; import com.intellij.util.Alarm; import com.intellij.util.NullableFunction; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.TextTransferable; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.table.IconTableCellRenderer; import one.util.streamex.MoreCollectors; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.table.AbstractTableModel; import javax.swing.table.TableColumn; import 
javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.datatransfer.Transferable;
import java.awt.event.*;
import java.util.List;
import java.util.*;
import java.util.stream.Stream;

/**
 * Tree-table used in the inspections settings UI: column 0 is the inspection tree,
 * column 1 shows severity icons, column 2 is a tri-state enabled checkbox.
 *
 * @author Dmitry Batkovich
 */
public class InspectionsConfigTreeTable extends TreeTable {
  private final static Logger LOG = Logger.getInstance(InspectionsConfigTreeTable.class);

  // Column indices of the tree-table model.
  private final static int TREE_COLUMN = 0;
  private final static int SEVERITIES_COLUMN = 1;
  private final static int IS_ENABLED_COLUMN = 2;

  // Extra checkbox-column width on macOS to account for native padding.
  public static int getAdditionalPadding() {
    return SystemInfo.isMac ? 16 : 0;
  }

  public static InspectionsConfigTreeTable create(final InspectionsConfigTreeTableSettings settings, @NotNull Disposable parentDisposable) {
    return new InspectionsConfigTreeTable(new InspectionsConfigTreeTableModel(settings, parentDisposable));
  }

  /**
   * Sets up renderers/editors for the severity and enabled columns, a hover tooltip for
   * severity icons, double-click and Space toggling of the enabled state, and copy support
   * for the selected tree path.
   */
  public InspectionsConfigTreeTable(final InspectionsConfigTreeTableModel model) {
    super(model);

    TableColumn severitiesColumn = getColumnModel().getColumn(SEVERITIES_COLUMN);
    severitiesColumn.setCellRenderer(new IconTableCellRenderer<Icon>() {
      @Override
      public Component getTableCellRendererComponent(JTable table, Object value, boolean selected, boolean focus, int row, int column) {
        // Render unselected (selected=false) and repaint the background manually so the
        // icon cell matches the row selection color without the renderer's selection text.
        Component component = super.getTableCellRendererComponent(table, value, false, focus, row, column);
        Color bg = selected ? table.getSelectionBackground() : table.getBackground();
        component.setBackground(bg);
        ((JLabel) component).setText("");
        return component;
      }

      @Override
      protected Icon getIcon(@NotNull Icon value, JTable table, int row) {
        return value;
      }
    });
    severitiesColumn.setMaxWidth(JBUI.scale(20));

    TableColumn isEnabledColumn = getColumnModel().getColumn(IS_ENABLED_COLUMN);
    isEnabledColumn.setMaxWidth(JBUI.scale(22 + getAdditionalPadding()));
    ThreeStateCheckBoxRenderer boxRenderer = new ThreeStateCheckBoxRenderer();
    boxRenderer.setOpaque(true);
    isEnabledColumn.setCellRenderer(boxRenderer);
    isEnabledColumn.setCellEditor(new ThreeStateCheckBoxRenderer());

    // Hover tooltip over the severity column: a single-scope level shows a simple
    // icon+text label; multiple scopes show the scope/severity hint table.
    addMouseMotionListener(new MouseAdapter() {
      @Override
      public void mouseMoved(final MouseEvent e) {
        Point point = e.getPoint();
        int column = columnAtPoint(point);
        int row = rowAtPoint(point);
        if (column == SEVERITIES_COLUMN && row >= 0 && row < getRowCount()) {
          Object maybeIcon = getModel().getValueAt(row, column);
          if (maybeIcon instanceof MultiScopeSeverityIcon) {
            MultiScopeSeverityIcon icon = (MultiScopeSeverityIcon)maybeIcon;
            LinkedHashMap<String, HighlightDisplayLevel> scopeToAverageSeverityMap =
              icon.getScopeToAverageSeverityMap();
            JComponent component = null;
            if (scopeToAverageSeverityMap.size() == 1 &&
                icon.getDefaultScopeName().equals(ContainerUtil.getFirstItem(scopeToAverageSeverityMap.keySet()))) {
              HighlightDisplayLevel level = ContainerUtil.getFirstItem(scopeToAverageSeverityMap.values());
              if (level != null) {
                JLabel label = new JLabel();
                label.setIcon(level.getIcon());
                label.setText(SingleInspectionProfilePanel.renderSeverity(level.getSeverity()));
                component = label;
              }
            } else {
              component = new ScopesAndSeveritiesHintTable(scopeToAverageSeverityMap, icon.getDefaultScopeName());
            }
            IdeTooltipManager.getInstance().show(
              new IdeTooltip(InspectionsConfigTreeTable.this, point, component), false);
          }
        }
      }
    });

    // Double-clicking a leaf (an inspection, not a group) toggles its enabled state.
    new DoubleClickListener() {
      @Override
      protected boolean onDoubleClick(MouseEvent event) {
        final TreePath path = getTree().getPathForRow(getTree().getLeadSelectionRow());
        if (path != null) {
          final InspectionConfigTreeNode node = (InspectionConfigTreeNode)path.getLastPathComponent();
          if (node.isLeaf()) {
            model.swapInspectionEnableState();
          }
        }
        return true;
      }
    }.installOn(this);

    // Copy puts the selected tree path (minus the root) on the clipboard as "a | b | c".
    setTransferHandler(new TransferHandler() {
      @Nullable
      @Override
      protected Transferable createTransferable(JComponent c) {
        final TreePath path = getTree().getPathForRow(getTree().getLeadSelectionRow());
        if (path != null) {
          return new TextTransferable(StringUtil.join(ContainerUtil.mapNotNull(path.getPath(),
                                                                               (NullableFunction<Object, String>)o -> o == path.getPath()[0] ? null : o.toString()), " | "));
        }
        return null;
      }

      @Override
      public int getSourceActions(JComponent c) {
        return COPY;
      }
    });

    getTableHeader().setReorderingAllowed(false);
    getTableHeader().setResizingAllowed(false);

    // Space toggles the enabled state of the current selection.
    registerKeyboardAction(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        model.swapInspectionEnableState();
        updateUI();
      }
    }, KeyStroke.getKeyStroke(KeyEvent.VK_SPACE, 0), JComponent.WHEN_FOCUSED);

    getEmptyText().setText("No enabled inspections available");
  }

  /** Returns the single selected Tool node, or null if the selection is not exactly one tool. */
  @Nullable
  public InspectionConfigTreeNode.Tool getStrictlySelectedToolNode() {
    TreePath[] paths = getTree().getSelectionPaths();
    return paths != null && paths.length == 1 && paths[0].getLastPathComponent() instanceof InspectionConfigTreeNode.Tool
           ? (InspectionConfigTreeNode.Tool)paths[0].getLastPathComponent()
           : null;
  }

  public Collection<InspectionConfigTreeNode.Tool> getSelectedToolNodes() {
    return InspectionsAggregationUtil.getInspectionsNodes(getTree().getSelectionPaths());
  }

  @Override
  public void paint(@NotNull Graphics g) {
    super.paint(g);
    UIUtil.fixOSXEditorBackground(this);
  }

  /** Configuration callbacks the surrounding settings panel supplies to the tree-table model. */
  public abstract static class InspectionsConfigTreeTableSettings {
    private final TreeNode myRoot;
    private final Project myProject;

    public InspectionsConfigTreeTableSettings(final TreeNode root, final Project project) {
      myRoot = root;
      myProject = project;
    }

    public TreeNode getRoot() {
      return myRoot;
    }

    public Project getProject() {
      return myProject;
    }

    protected abstract InspectionProfileImpl getInspectionProfile();

    protected abstract void onChanged(InspectionConfigTreeNode node);

    public abstract void updateRightPanel();
  }

  /** Enables/disables a tool in the profile and mirrors the state onto every scope of that tool. */
  public static void setToolEnabled(boolean newState,
                                    @NotNull InspectionProfileImpl profile,
                                    @NotNull String toolId,
                                    @NotNull Project project) {
    profile.setToolEnabled(toolId, newState);
    for (ScopeToolState scopeToolState : profile.getTools(toolId, project).getTools()) {
      scopeToolState.setEnabled(newState);
    }
  }

  private static class InspectionsConfigTreeTableModel extends DefaultTreeModel implements TreeTableModel {
    private final InspectionsConfigTreeTableSettings mySettings;
    private final Runnable myUpdateRunnable;
    // NOTE(review): myTreeTable is only read in this visible range; its assignment
    // (presumably via TreeTableModel#setTree or a setter) is outside the chunk — confirm.
    private TreeTable myTreeTable;
    private final Alarm myUpdateAlarm;

    InspectionsConfigTreeTableModel(final InspectionsConfigTreeTableSettings settings,
                                    @NotNull Disposable parentDisposable) {
      super(settings.getRoot());
      mySettings = settings;
      myUpdateRunnable = () -> {
        settings.updateRightPanel();
        ((AbstractTableModel)myTreeTable.getModel()).fireTableDataChanged();
      };
      myUpdateAlarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD, parentDisposable);
    }

    @Override
    public int getColumnCount() {
      return 3;
    }

    @Nullable
    @Override
    public String getColumnName(final int column) {
      return null;
    }

    @Override
    public Class getColumnClass(final int column) {
      switch (column) {
        case TREE_COLUMN:
          return TreeTableModel.class;
        case SEVERITIES_COLUMN:
          return Icon.class;
        case IS_ENABLED_COLUMN:
          return Boolean.class;
      }
      throw new IllegalArgumentException();
    }

    /**
     * Severity column: aggregates the (possibly scoped) severities of all inspections under
     * the node into one icon. Enabled column: tri-state Boolean (null = mixed).
     */
    @Nullable
    @Override
    public Object getValueAt(final Object node, final int column) {
      if (column == TREE_COLUMN) {
        return null;
      }
      final InspectionConfigTreeNode treeNode = (InspectionConfigTreeNode)node;
      final List<HighlightDisplayKey> inspectionsKeys = InspectionsAggregationUtil.getInspectionsKeys(treeNode);
      if (column == SEVERITIES_COLUMN) {
        final MultiColoredHighlightSeverityIconSink sink = new MultiColoredHighlightSeverityIconSink();
        for (final HighlightDisplayKey selectedInspectionsNode : inspectionsKeys) {
          final String toolId = selectedInspectionsNode.toString();
          if (mySettings.getInspectionProfile().getTools(toolId, mySettings.getProject()).isEnabled()) {
            sink.put(mySettings.getInspectionProfile().getToolDefaultState(toolId, mySettings.getProject()),
                     mySettings.getInspectionProfile().getNonDefaultTools(toolId, mySettings.getProject()));
          }
        }
        return sink.constructIcon(mySettings.getInspectionProfile());
      } else if (column == IS_ENABLED_COLUMN) {
        return isEnabled(inspectionsKeys);
      }
      throw new IllegalArgumentException();
    }

    /**
     * Returns TRUE/FALSE when every selected inspection (across all its scopes) agrees on
     * the enabled state; null when the states are mixed (MoreCollectors.onlyOne yields an
     * empty Optional for more than one distinct value).
     */
    @Nullable
    private Boolean isEnabled(final List<HighlightDisplayKey> selectedInspectionsNodes) {
      return selectedInspectionsNodes
        .stream()
        .map(key -> mySettings.getInspectionProfile().getTools(key.toString(), mySettings.getProject()))
        .flatMap(tools -> tools.isEnabled() ? tools.getTools().stream().map(ScopeToolState::isEnabled) : Stream.of(false))
        .distinct()
        .collect(MoreCollectors.onlyOne()).orElse(null);
    }

    @Override
    public boolean isCellEditable(final Object node, final int column) {
      return column == IS_ENABLED_COLUMN;
    }

    /** Applies the checkbox edit to every inspection under the node; null (mixed) is a no-op. */
    @Override
    public void setValueAt(final Object aValue, final Object node, final int column) {
      LOG.assertTrue(column == IS_ENABLED_COLUMN);
      if (aValue == null) {
        return;
      }
      final boolean doEnable = (Boolean) aValue;
      final InspectionProfileImpl profile = mySettings.getInspectionProfile();
      if (profile == null) return;
      for (final InspectionConfigTreeNode.Tool aNode : InspectionsAggregationUtil.getInspectionsNodes((InspectionConfigTreeNode)node)) {
        setToolEnabled(doEnable, profile, aNode.getKey().toString(), mySettings.getProject());
        mySettings.onChanged(aNode);
      }
      // updateRightPanel() is defined beyond the visible range of this chunk.
      updateRightPanel();
    }

    /**
     * Toggles the enabled state of the current selection: if all selected rows share one
     * state the toggle inverts it, otherwise (mixed) everything is enabled.
     */
    public void swapInspectionEnableState() {
      LOG.assertTrue(myTreeTable != null);

      Boolean state = null;
      final HashSet<HighlightDisplayKey> tools = new HashSet<>();
      final List<InspectionConfigTreeNode> nodes = new ArrayList<>();

      TreePath[] selectionPaths = myTreeTable.getTree().getSelectionPaths();
      if (selectionPaths == null) return;
      for (TreePath selectionPath : selectionPaths) {
        final InspectionConfigTreeNode node = (InspectionConfigTreeNode)selectionPath.getLastPathComponent();
        // collectInspectionFromNodes(...) is defined beyond the visible range of this chunk.
        collectInspectionFromNodes(node, tools, nodes);
      }

      final int[] selectedRows = myTreeTable.getSelectedRows();
      for (int selectedRow : selectedRows) {
        final Boolean value = (Boolean)myTreeTable.getValueAt(selectedRow, IS_ENABLED_COLUMN);
        if (state == null) {
          state = value;
        }
        else if (!state.equals(value)) {
          // Mixed states across the selection — treat as "not all enabled".
          state = null;
          break;
        }
      }
      final boolean newState = !Boolean.TRUE.equals(state);

      final InspectionProfileImpl profile = mySettings.getInspectionProfile();
      if (profile == null) return;
      for (HighlightDisplayKey tool : tools) {
        setToolEnabled(newState, profile, tool.toString(), mySettings.getProject());
      }
      // NOTE(review): the visible chunk is truncated here, mid-method — the remainder of
      // swapInspectionEnableState() and the rest of the class lie outside this range.
      for (InspectionConfigTreeNode node : nodes) {
mySettings.onChanged(node); } updateRightPanel(); } private void updateRightPanel() { if (myTreeTable != null) { if (!myUpdateAlarm.isDisposed()) { myUpdateAlarm.cancelAllRequests(); myUpdateAlarm.addRequest(myUpdateRunnable, 10, ModalityState.stateForComponent(myTreeTable)); } } } private static void collectInspectionFromNodes(final InspectionConfigTreeNode node, final Set<? super HighlightDisplayKey> tools, final List<? super InspectionConfigTreeNode> nodes) { if (node == null) { return; } nodes.add(node); if (node instanceof InspectionConfigTreeNode.Group) { for (int i = 0; i < node.getChildCount(); i++) { collectInspectionFromNodes((InspectionConfigTreeNode)node.getChildAt(i), tools, nodes); } } else { tools.add(((InspectionConfigTreeNode.Tool)node).getKey()); } } @Override public void setTree(final JTree tree) { myTreeTable = ((TreeTableTree)tree).getTreeTable(); } } private static class SeverityAndOccurrences { private HighlightSeverity myPrimarySeverity; private final Map<String, HighlightSeverity> myOccurrences = new HashMap<>(); public SeverityAndOccurrences incOccurrences(final String toolName, final HighlightSeverity severity) { if (myPrimarySeverity == null) { myPrimarySeverity = severity; } else if (!Comparing.equal(severity, myPrimarySeverity)) { myPrimarySeverity = ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY; } myOccurrences.put(toolName, severity); return this; } public HighlightSeverity getPrimarySeverity() { return myPrimarySeverity; } public int getOccurrencesSize() { return myOccurrences.size(); } public Map<String, HighlightSeverity> getOccurrences() { return myOccurrences; } } private static class MultiColoredHighlightSeverityIconSink { private final Map<String, SeverityAndOccurrences> myScopeToAverageSeverityMap = new HashMap<>(); private String myDefaultScopeName; public Icon constructIcon(final InspectionProfileImpl inspectionProfile) { final Map<String, HighlightSeverity> computedSeverities = computeSeverities(); if (computedSeverities == 
null) { return null; } boolean allScopesHasMixedSeverity = true; for (HighlightSeverity severity : computedSeverities.values()) { if (!severity.equals(ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY)) { allScopesHasMixedSeverity = false; break; } } return allScopesHasMixedSeverity ? ScopesAndSeveritiesTable.MIXED_FAKE_LEVEL.getIcon() : new MultiScopeSeverityIcon(computedSeverities, myDefaultScopeName, inspectionProfile); } @Nullable private Map<String, HighlightSeverity> computeSeverities() { if (myScopeToAverageSeverityMap.isEmpty()) { return null; } final Map<String, HighlightSeverity> result = new HashMap<>(); final Map.Entry<String, SeverityAndOccurrences> entry = ContainerUtil.getFirstItem(myScopeToAverageSeverityMap.entrySet()); result.put(entry.getKey(), entry.getValue().getPrimarySeverity()); if (myScopeToAverageSeverityMap.size() == 1) { return result; } final SeverityAndOccurrences defaultSeveritiesAndOccurrences = myScopeToAverageSeverityMap.get(myDefaultScopeName); if (defaultSeveritiesAndOccurrences == null) { for (Map.Entry<String, SeverityAndOccurrences> e: myScopeToAverageSeverityMap.entrySet()) { final HighlightSeverity primarySeverity = e.getValue().getPrimarySeverity(); if (primarySeverity != null) { result.put(e.getKey(), primarySeverity); } } return result; } final int allInspectionsCount = defaultSeveritiesAndOccurrences.getOccurrencesSize(); final Map<String, HighlightSeverity> allScopes = defaultSeveritiesAndOccurrences.getOccurrences(); for (String currentScope : myScopeToAverageSeverityMap.keySet()) { final SeverityAndOccurrences currentSeverityAndOccurrences = myScopeToAverageSeverityMap.get(currentScope); if (currentSeverityAndOccurrences == null) { continue; } final HighlightSeverity currentSeverity = currentSeverityAndOccurrences.getPrimarySeverity(); if (currentSeverity == ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY || currentSeverityAndOccurrences.getOccurrencesSize() == allInspectionsCount || myDefaultScopeName.equals(currentScope)) { 
result.put(currentScope, currentSeverity); } else { Set<String> toolsToCheck = ContainerUtil.newHashSet(allScopes.keySet()); toolsToCheck.removeAll(currentSeverityAndOccurrences.getOccurrences().keySet()); boolean doContinue = false; final Map<String, HighlightSeverity> lowerScopeOccurrences = myScopeToAverageSeverityMap.get(myDefaultScopeName).getOccurrences(); for (String toolName : toolsToCheck) { final HighlightSeverity currentToolSeverity = lowerScopeOccurrences.get(toolName); if (currentToolSeverity != null) { if (!currentSeverity.equals(currentToolSeverity)) { result.put(currentScope, ScopesAndSeveritiesTable.MIXED_FAKE_SEVERITY); doContinue = true; break; } } } if (doContinue) { continue; } result.put(currentScope, currentSeverity); } } return result; } public void put(@NotNull final ScopeToolState defaultState, @NotNull final List<? extends ScopeToolState> nonDefault) { putOne(defaultState); if (myDefaultScopeName == null) { myDefaultScopeName = defaultState.getScopeName(); } for (final ScopeToolState scopeToolState : nonDefault) { putOne(scopeToolState); } } private void putOne(final ScopeToolState state) { if (!state.isEnabled()) { return; } final Icon icon = state.getLevel().getIcon(); final String scopeName = state.getScopeName(); if (icon instanceof HighlightDisplayLevel.ColoredIcon) { final SeverityAndOccurrences severityAndOccurrences = myScopeToAverageSeverityMap.get(scopeName); final String inspectionName = state.getTool().getShortName(); if (severityAndOccurrences == null) { myScopeToAverageSeverityMap.put(scopeName, new SeverityAndOccurrences().incOccurrences(inspectionName, state.getLevel().getSeverity())); } else { severityAndOccurrences.incOccurrences(inspectionName, state.getLevel().getSeverity()); } } } } }
package com.jcwhatever.nucleus.internal.managed.commands; import com.jcwhatever.v1_8_R3.BukkitTester; import com.jcwhatever.nucleus.NucleusTest; import com.jcwhatever.nucleus.internal.managed.commands.CommandCollection.ICommandContainerFactory; import com.jcwhatever.nucleus.internal.managed.commands.DummyRegisteredCommand.CommandInfoBuilder; import com.jcwhatever.nucleus.managed.commands.CommandInfo; import com.jcwhatever.nucleus.managed.commands.ICommand; import com.jcwhatever.nucleus.managed.commands.exceptions.InvalidArgumentException; import com.jcwhatever.nucleus.managed.commands.exceptions.InvalidParameterException; import com.jcwhatever.nucleus.managed.commands.exceptions.MissingArgumentException; import com.jcwhatever.nucleus.managed.commands.exceptions.TooManyArgsException; import org.bukkit.plugin.Plugin; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; public class ArgumentParserTest { ArgumentParser parser = new ArgumentParser(); private DummyRegisteredCommand getCommand() { CommandDispatcher dispatcher = new CommandDispatcher( BukkitTester.mockPlugin("dummy"), new ICommandContainerFactory() { @Override public RegisteredCommand create(Plugin plugin, ICommand command) { return new DummyRegisteredCommand(plugin, command, this); } }); dispatcher.registerCommand(DummyCommand.class); return (DummyRegisteredCommand)dispatcher.getCommand("dummy"); } @BeforeClass public static void testStartup() { NucleusTest.init(); } /** * Test parsing; No Arguments expected. 
*/ @Test public void testParse() throws Exception { DummyRegisteredCommand command = getCommand(); ArgumentParseResults results; results = parser.parse(command, new String[0]); Assert.assertEquals(0, results.getArgMap().size()); // make sure providing too many args causes an exception try { parser.parse(command, new String[]{"staticParam"}); throw new AssertionError("TooManyArgsException was expected to be thrown."); } catch (TooManyArgsException ignore) {} } /** * Test parsing; 1 static argument expected. */ @Test public void testParse1() throws Exception { DummyRegisteredCommand command = getCommand(); CommandInfo info = new CommandInfoBuilder("dummy").staticParams("name").build(); command.setInfo(info); ArgumentParseResults results; results = parser.parse(command, new String[]{"nameArg"}); Assert.assertEquals(1, results.getArgMap().size()); // check for missing argument, no arguments provided where expected try { parser.parse(command, new String[]{}); throw new AssertionError("MissingArgumentException was expected to be thrown."); } catch (MissingArgumentException ignore) {} // check for too many arguments try { parser.parse(command, new String[]{ "arg1", "arg2" }); throw new AssertionError("TooManyArgsException was expected to be thrown."); } catch (TooManyArgsException ignore) {} } /** * Test parsing; 1 required floating argument expected. 
*/ @Test public void testParse2() throws Exception { DummyRegisteredCommand command = getCommand(); CommandInfo info = new CommandInfoBuilder("dummy").floatingParams("param").build(); command.setInfo(info); ArgumentParseResults results; results = parser.parse(command, new String[]{"-param", "\"arg", "with", "spaces\""}); Assert.assertEquals(1, results.getArgMap().size()); // check missing arg, floating parameter not included try { parser.parse(command, new String[0]); throw new AssertionError("MissingArgumentException was expected to be thrown."); } catch (MissingArgumentException ignore) { } // check missing arg, floating parameter missing its argument try { parser.parse(command, new String[]{"-param"}); throw new AssertionError("MissingArgumentException was expected to be thrown."); } catch (MissingArgumentException ignore) { } // check invalid argument, invalid static argument try { parser.parse(command, new String[]{"staticArg", "-param", "test"}); throw new AssertionError("InvalidArgumentException was expected to be thrown."); } catch (InvalidArgumentException ignore) { } } /** * Test parsing; 1 optional flag */ @Test public void testParse3() throws Exception { DummyRegisteredCommand command = getCommand(); CommandInfo info = new CommandInfoBuilder("dummy").flags("flag1", "flag2").build(); command.setInfo(info); ArgumentParseResults results; results = parser.parse(command, new String[]{"--flag1", "--flag2"}); Assert.assertEquals(0, results.getArgMap().size()); // flags are not stored in arg map Assert.assertEquals(true, results.getFlag("flag1")); Assert.assertEquals(true, results.getFlag("flag2")); results = parser.parse(command, new String[]{"--flag1"}); Assert.assertEquals(0, results.getArgMap().size()); // flags are not stored in arg map Assert.assertEquals(true, results.getFlag("flag1")); Assert.assertEquals(false, results.getFlag("flag2")); results = parser.parse(command, new String[]{"--flag2"}); Assert.assertEquals(0, results.getArgMap().size()); // 
flags are not stored in arg map Assert.assertEquals(false, results.getFlag("flag1")); Assert.assertEquals(true, results.getFlag("flag2")); results = parser.parse(command, new String[0]); Assert.assertEquals(0, results.getArgMap().size()); // flags are not stored in arg map Assert.assertEquals(false, results.getFlag("flag1")); Assert.assertEquals(false, results.getFlag("flag2")); // check for invalid argument, invalid static argument try { parser.parse(command, new String[]{"staticArg"}); throw new AssertionError("InvalidArgumentException was expected to be thrown."); } catch (InvalidArgumentException ignore) { } // check for invalid parameter, invalid flag name try { parser.parse(command, new String[]{"--flag3"}); throw new AssertionError("InvalidParameterException was expected to be thrown."); } catch (InvalidParameterException ignore) { } // check for invalid parameter, undefined floating parameter try { parser.parse(command, new String[]{"-floating", "arg"}); throw new AssertionError("InvalidParameterException was expected to be thrown."); } catch (InvalidParameterException ignore) { } // check for invalid parameter, invalid flag name try { parser.parse(command, new String[]{"--flag1", "--flag3"}); throw new AssertionError("InvalidParameterException was expected to be thrown."); } catch (InvalidParameterException ignore) { } } /** * Test parsing; 1 static param required, 1 required floating argument. 
*/ @Test public void testParse4() throws Exception { DummyRegisteredCommand command = getCommand(); CommandInfo info = new CommandInfoBuilder("dummy") .staticParams("static") .floatingParams("param").build(); command.setInfo(info); ArgumentParseResults results; results = parser.parse(command, new String[]{"\"static", "arg\"", "-param", "\"arg", "with", "spaces\""}); Assert.assertEquals(2, results.getArgMap().size()); Assert.assertEquals("static arg", results.getArgMap().get("static").getValue()); Assert.assertEquals("arg with spaces", results.getArgMap().get("param").getValue()); // check for missing argument, required floating arg missing try { parser.parse(command, new String[]{"\"static", "arg\""}); throw new AssertionError("MissingArgumentException expected to be thrown."); } catch (MissingArgumentException ignore) { } // check for missing argument, required static arg missing try { parser.parse(command, new String[]{"-param", "\"arg", "with", "spaces\""}); throw new AssertionError("MissingArgumentException expected to be thrown."); } catch (MissingArgumentException ignore) { } // check for invalid parameter, param marked as flag instead of floating param try { parser.parse(command, new String[]{"\"static", "arg\"", "--param", "\"arg", "with", "spaces\""}); throw new AssertionError("InvalidParameterException expected to be thrown."); } catch (InvalidParameterException ignore) { } // check for missing argument, static param out of order try { parser.parse(command, new String[]{"-param", "\"arg", "with", "spaces\"", "\"static", "arg\""}); throw new AssertionError("MissingArgumentException expected to be thrown."); } catch (MissingArgumentException ignore) { } } /** * Test parsing; 1 static param required, 1 optional floating argument. 
*/ @Test public void testParse5() throws Exception { DummyRegisteredCommand command = getCommand(); CommandInfo info = new CommandInfoBuilder("dummy") .staticParams("static") .floatingParams("param=test").build(); command.setInfo(info); ArgumentParseResults results; results = parser.parse(command, new String[]{"\"static", "arg\"", "-param", "\"arg", "with", "spaces\""}); Assert.assertEquals(2, results.getArgMap().size()); Assert.assertEquals("static arg", results.getArgMap().get("static").getValue()); Assert.assertEquals("arg with spaces", results.getArgMap().get("param").getValue()); results = parser.parse(command, new String[]{"\"static", "arg\"" }); Assert.assertEquals(2, results.getArgMap().size()); Assert.assertEquals("static arg", results.getArgMap().get("static").getValue()); Assert.assertEquals("test", results.getArgMap().get("param").getValue()); } /** * Test parsing; 1 optional static param, 1 optional floating argument. */ @Test public void testParse6() throws Exception { DummyRegisteredCommand command = getCommand(); CommandInfo info = new CommandInfoBuilder("dummy") .staticParams("static=test") .floatingParams("param=test").build(); command.setInfo(info); ArgumentParseResults results; results = parser.parse(command, new String[]{"\"static", "arg\"", "-param", "\"arg", "with", "spaces\""}); Assert.assertEquals(2, results.getArgMap().size()); Assert.assertEquals("static arg", results.getArgMap().get("static").getValue()); Assert.assertEquals("arg with spaces", results.getArgMap().get("param").getValue()); results = parser.parse(command, new String[]{ }); Assert.assertEquals(2, results.getArgMap().size()); Assert.assertEquals("test", results.getArgMap().get("static").getValue()); Assert.assertEquals("test", results.getArgMap().get("param").getValue()); } }
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.impl.java.stubs;

import com.intellij.lang.ASTNode;
import com.intellij.lang.LighterAST;
import com.intellij.lang.LighterASTNode;
import com.intellij.psi.*;
import com.intellij.psi.impl.cache.ModifierFlags;
import com.intellij.psi.impl.cache.RecordUtil;
import com.intellij.psi.impl.cache.TypeInfo;
import com.intellij.psi.impl.java.stubs.impl.PsiMethodStubImpl;
import com.intellij.psi.impl.java.stubs.index.JavaStubIndexKeys;
import com.intellij.psi.impl.source.PsiAnnotationMethodImpl;
import com.intellij.psi.impl.source.PsiMethodImpl;
import com.intellij.psi.impl.source.tree.ElementType;
import com.intellij.psi.impl.source.tree.JavaDocElementType;
import com.intellij.psi.impl.source.tree.JavaElementType;
import com.intellij.psi.impl.source.tree.LightTreeUtil;
import com.intellij.psi.impl.source.tree.java.AnnotationMethodElement;
import com.intellij.psi.stubs.IndexSink;
import com.intellij.psi.stubs.StubElement;
import com.intellij.psi.stubs.StubInputStream;
import com.intellij.psi.stubs.StubOutputStream;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.BitUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Stub element type for Java methods (and annotation methods): builds
 * {@link PsiMethodStub}s from the light AST, (de)serializes them, and feeds
 * the method/method-type stub indices.
 *
 * @author max
 */
abstract class JavaMethodElementType extends JavaStubElementType<PsiMethodStub, PsiMethod> {
  // Sentinel indexed in METHOD_TYPES when a parameter's type resolves to a
  // visible type parameter rather than a concrete class name.
  private static final String TYPE_PARAMETER_PSEUDO_NAME = "$TYPE_PARAMETER$";

  JavaMethodElementType(@NonNls final String name) {
    super(name);
  }

  @Override
  public PsiMethod createPsi(@NotNull final PsiMethodStub stub) {
    return getPsiFactory(stub).createMethod(stub);
  }

  @Override
  public PsiMethod createPsi(@NotNull final ASTNode node) {
    // Annotation attribute methods get their own PSI implementation.
    if (node instanceof AnnotationMethodElement) {
      return new PsiAnnotationMethodImpl(node);
    }
    return new PsiMethodImpl(node);
  }

  /**
   * Builds a method stub from the light tree: collects the name, constructor-ness,
   * varargs-ness, deprecation markers, doc-comment presence, and (for annotation
   * methods) the default value text.
   */
  @NotNull
  @Override
  public PsiMethodStub createStub(@NotNull final LighterAST tree, @NotNull final LighterASTNode node, @NotNull final StubElement parentStub) {
    String name = null;
    // Assume constructor until a return TYPE node is seen (constructors have none).
    boolean isConstructor = true;
    boolean isVarArgs = false;
    boolean isDeprecatedByComment = false;
    boolean hasDeprecatedAnnotation = false;
    boolean hasDocComment = false;
    String defValueText = null;

    // Set after the 'default' keyword; the next significant child is the default value.
    boolean expectingDef = false;
    for (final LighterASTNode child : tree.getChildren(node)) {
      final IElementType type = child.getTokenType();
      if (type == JavaDocElementType.DOC_COMMENT) {
        hasDocComment = true;
        isDeprecatedByComment = RecordUtil.isDeprecatedByDocComment(tree, child);
      }
      else if (type == JavaElementType.MODIFIER_LIST) {
        hasDeprecatedAnnotation = RecordUtil.isDeprecatedByAnnotation(tree, child);
      }
      else if (type == JavaElementType.TYPE) {
        // A return type is present, so this is not a constructor.
        isConstructor = false;
      }
      else if (type == JavaTokenType.IDENTIFIER) {
        name = RecordUtil.intern(tree.getCharTable(), child);
      }
      else if (type == JavaElementType.PARAMETER_LIST) {
        // Only the last parameter can be a varargs (ellipsis) parameter.
        final List<LighterASTNode> params = LightTreeUtil.getChildrenOfType(tree, child, JavaElementType.PARAMETER);
        if (!params.isEmpty()) {
          final LighterASTNode pType = LightTreeUtil.firstChildOfType(tree, params.get(params.size() - 1), JavaElementType.TYPE);
          if (pType != null) {
            isVarArgs = LightTreeUtil.firstChildOfType(tree, pType, JavaTokenType.ELLIPSIS) != null;
          }
        }
      }
      else if (type == JavaTokenType.DEFAULT_KEYWORD) {
        expectingDef = true;
      }
      else if (expectingDef && !ElementType.JAVA_COMMENT_OR_WHITESPACE_BIT_SET.contains(type) &&
               type != JavaTokenType.SEMICOLON && type != JavaElementType.CODE_BLOCK) {
        // First significant node after 'default' is the annotation method's default value.
        defValueText = LightTreeUtil.toFilteredString(tree, child, null);
        break;
      }
    }

    TypeInfo typeInfo = isConstructor ? TypeInfo.createConstructorType() : TypeInfo.create(tree, node, parentStub);
    boolean isAnno = node.getTokenType() == JavaElementType.ANNOTATION_METHOD;
    byte flags = PsiMethodStubImpl.packFlags(isConstructor, isAnno, isVarArgs, isDeprecatedByComment, hasDeprecatedAnnotation, hasDocComment);

    return new PsiMethodStubImpl(parentStub, name, typeInfo, flags, defValueText);
  }

  // NOTE: field order here must stay in sync with deserialize() below.
  @Override
  public void serialize(@NotNull final PsiMethodStub stub, @NotNull final StubOutputStream dataStream) throws IOException {
    dataStream.writeName(stub.getName());
    TypeInfo.writeTYPE(dataStream, stub.getReturnTypeText(false));
    dataStream.writeByte(((PsiMethodStubImpl)stub).getFlags());
    // Default value text is only present for annotation methods.
    if (stub.isAnnotationMethod()) {
      dataStream.writeName(stub.getDefaultValueText());
    }
  }

  // NOTE: reads fields in the exact order serialize() wrote them.
  @NotNull
  @Override
  public PsiMethodStub deserialize(@NotNull final StubInputStream dataStream, final StubElement parentStub) throws IOException {
    String name = dataStream.readNameString();
    final TypeInfo type = TypeInfo.readTYPE(dataStream);
    byte flags = dataStream.readByte();
    // The default value was only written when the annotation-method flag is set.
    String defaultMethodValue = PsiMethodStubImpl.isAnnotationMethod(flags) ? dataStream.readNameString() : null;
    return new PsiMethodStubImpl(parentStub, name, type, flags, defaultMethodValue);
  }

  /**
   * Indexes the method name (plus static-member indices for static non-private
   * members) and the short class names of its parameter types.
   */
  @Override
  public void indexStub(@NotNull final PsiMethodStub stub, @NotNull final IndexSink sink) {
    final String name = stub.getName();
    if (name != null) {
      sink.occurrence(JavaStubIndexKeys.METHODS, name);
      if (RecordUtil.isStaticNonPrivateMember(stub)) {
        sink.occurrence(JavaStubIndexKeys.JVM_STATIC_MEMBERS_NAMES, name);
        sink.occurrence(JavaStubIndexKeys.JVM_STATIC_MEMBERS_TYPES, stub.getReturnTypeText(false).getShortTypeText());
      }
    }

    Set<String> methodTypeParams = getVisibleTypeParameters(stub);
    for (StubElement stubElement : stub.getChildrenStubs()) {
      if (stubElement instanceof PsiParameterListStub) {
        for (StubElement paramStub : ((PsiParameterListStub)stubElement).getChildrenStubs()) {
          if (paramStub instanceof PsiParameterStub) {
            TypeInfo type = ((PsiParameterStub)paramStub).getType(false);
            String typeName = PsiNameHelper.getShortClassName(type.text);
            // Primitives and their wrappers are too common to be useful index keys.
            if (TypeConversionUtil.isPrimitive(typeName) || TypeConversionUtil.isPrimitiveWrapper(typeName)) continue;
            sink.occurrence(JavaStubIndexKeys.METHOD_TYPES, typeName);
            // An unqualified name (short name == full text, at most a varargs array)
            // matching a visible type parameter is indexed under the pseudo-name too.
            if (typeName.equals(type.text) &&
                (type.arrayCount == 0 || type.arrayCount == 1 && type.isEllipsis) &&
                methodTypeParams.contains(typeName)) {
              sink.occurrence(JavaStubIndexKeys.METHOD_TYPES, TYPE_PARAMETER_PSEUDO_NAME);
            }
          }
        }
        // Only one parameter list per method; stop after processing it.
        break;
      }
    }
  }

  /**
   * Collects type-parameter names visible to {@code stub}: its own plus those of
   * enclosing stubs, walking outward until a static member boundary is reached
   * (outer type parameters are not visible from a static context).
   */
  @NotNull
  private static Set<String> getVisibleTypeParameters(@NotNull StubElement<?> stub) {
    Set<String> result = null;
    while (stub != null) {
      Set<String> names = getOwnTypeParameterNames(stub);
      if (!names.isEmpty()) {
        if (result == null) result = ContainerUtil.newHashSet();
        result.addAll(names);
      }

      if (isStatic(stub)) break;

      stub = stub.getParentStub();
    }
    return result == null ? Collections.emptySet() : result;
  }

  // True when the stub is a member whose modifier list carries the 'static' flag.
  private static boolean isStatic(@NotNull StubElement<?> stub) {
    if (stub instanceof PsiMemberStub) {
      PsiModifierListStub modList = stub.findChildStubByType(JavaStubElementTypes.MODIFIER_LIST);
      if (modList != null) {
        return BitUtil.isSet(modList.getModifiersMask(), ModifierFlags.NAME_TO_MODIFIER_FLAG_MAP.get(PsiModifier.STATIC));
      }
    }
    return false;
  }

  // Names of the type parameters declared directly on this stub (empty if none).
  private static Set<String> getOwnTypeParameterNames(StubElement<?> stubElement) {
    StubElement<PsiTypeParameterList> typeParamList = stubElement.findChildStubByType(JavaStubElementTypes.TYPE_PARAMETER_LIST);
    if (typeParamList == null) return Collections.emptySet();

    Set<String> methodTypeParams = null;
    for (Object tStub : typeParamList.getChildrenStubs()) {
      if (tStub instanceof PsiTypeParameterStub) {
        if (methodTypeParams == null) methodTypeParams = new HashSet<>();
        methodTypeParams.add(((PsiTypeParameterStub)tStub).getName());
      }
    }
    return methodTypeParams == null ? Collections.emptySet() : methodTypeParams;
  }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.refactoring.introduceVariable; import com.intellij.codeInsight.BlockUtils; import com.intellij.codeInsight.Nullability; import com.intellij.codeInsight.NullabilityAnnotationInfo; import com.intellij.codeInsight.NullableNotNullManager; import com.intellij.codeInsight.daemon.impl.analysis.HighlightUtil; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Attachment; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.diagnostic.RuntimeExceptionWithAttachments; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.LogicalPosition; import com.intellij.openapi.editor.ScrollType; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.psi.*; import com.intellij.psi.codeStyle.JavaCodeStyleManager; import com.intellij.psi.util.InheritanceUtil; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.refactoring.introduceField.ElementToWorkOn; import com.intellij.refactoring.util.FieldConflictsResolver; import com.intellij.refactoring.util.RefactoringUtil; import com.intellij.util.ArrayUtilRt; import com.intellij.util.IncorrectOperationException; import com.intellij.util.ObjectUtils; import com.intellij.util.ThreeState; import com.intellij.util.containers.ContainerUtil; import com.siyeh.ig.psiutils.*; import one.util.streamex.StreamEx; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; /** * Performs actual write action (see {@link #extractVariable()}) which introduces new variable and replaces all occurrences. * No user interaction is performed here. 
*/ class VariableExtractor { private static final Logger LOG = Logger.getInstance(VariableExtractor.class); private final Project myProject; private final Editor myEditor; private final IntroduceVariableSettings mySettings; private final PsiExpression myExpression; private @NotNull PsiElement myAnchor; private final PsiElement myContainer; private final PsiExpression[] myOccurrences; private final boolean myReplaceSelf; private final FieldConflictsResolver myFieldConflictsResolver; private final LogicalPosition myPosition; private VariableExtractor(final Project project, final PsiExpression expression, final Editor editor, final PsiElement anchorStatement, final PsiExpression[] occurrences, final IntroduceVariableSettings settings) { myProject = project; myExpression = expression; myEditor = editor; myOccurrences = occurrences; mySettings = settings; myContainer = anchorStatement.getParent(); myAnchor = correctAnchor(expression, anchorStatement, occurrences); myReplaceSelf = settings.isReplaceLValues() || !RefactoringUtil.isAssignmentLHS(expression); PsiCodeBlock newDeclarationScope = PsiTreeUtil.getParentOfType(myContainer, PsiCodeBlock.class, false); myFieldConflictsResolver = new FieldConflictsResolver(settings.getEnteredName(), newDeclarationScope); myPosition = editor != null ? 
editor.getCaretModel().getLogicalPosition() : null; } @NotNull private SmartPsiElementPointer<PsiVariable> extractVariable() { ApplicationManager.getApplication().assertWriteAccessAllowed(); final PsiExpression newExpr = myFieldConflictsResolver.fixInitializer(myExpression); if (myAnchor == myExpression) { myAnchor = newExpr; } PsiExpression initializer = RefactoringUtil.unparenthesizeExpression(newExpr); final SmartTypePointer selectedType = SmartTypePointerManager.getInstance(myProject).createSmartTypePointer( mySettings.getSelectedType()); initializer = IntroduceVariableBase.simplifyVariableInitializer(initializer, selectedType.getType()); CommentTracker commentTracker = new CommentTracker(); commentTracker.markUnchanged(initializer); initializer = (PsiExpression)initializer.copy(); PsiType type = stripNullabilityAnnotationsFromTargetType(selectedType, newExpr); PsiElement declaration = createDeclaration(type, mySettings.getEnteredName(), initializer); replaceOccurrences(newExpr); ensureCodeBlock(); PsiVariable var = addVariable(declaration, initializer); if (myAnchor instanceof PsiExpressionStatement && ExpressionUtils.isReferenceTo(((PsiExpressionStatement)myAnchor).getExpression(), var)) { commentTracker.deleteAndRestoreComments(myAnchor); if (myEditor != null) { myEditor.getCaretModel().moveToLogicalPosition(myPosition); myEditor.getCaretModel().moveToOffset(var.getTextRange().getEndOffset()); myEditor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE); myEditor.getSelectionModel().removeSelection(); } } highlight(var); if (!(var instanceof PsiPatternVariable)) { PsiUtil.setModifierProperty(var, PsiModifier.FINAL, mySettings.isDeclareFinal()); } if (mySettings.isDeclareVarType()) { PsiTypeElement typeElement = var.getTypeElement(); LOG.assertTrue(typeElement != null); IntroduceVariableBase.expandDiamondsAndReplaceExplicitTypeWithVar(typeElement, var); } myFieldConflictsResolver.fix(); return 
SmartPointerManager.getInstance(myProject).createSmartPsiElementPointer(var); } private void ensureCodeBlock() { if (myAnchor instanceof PsiStatement && RefactoringUtil.isLoopOrIf(myAnchor.getParent())) { myAnchor = BlockUtils.expandSingleStatementToBlockStatement((PsiStatement)myAnchor); } if (myAnchor instanceof PsiInstanceOfExpression && PsiUtil.skipParenthesizedExprDown(myExpression) instanceof PsiTypeCastExpression) { return; } if (myAnchor instanceof PsiExpression) { PsiExpression place = RefactoringUtil.ensureCodeBlock(((PsiExpression)myAnchor)); if (place == null) { throw new RuntimeExceptionWithAttachments( "Cannot ensure code block: myAnchor type is " + myAnchor.getClass() + "; parent type is " + myAnchor.getParent().getClass(), new Attachment("context.txt", myContainer.getText())); } PsiElement statement = RefactoringUtil.getParentStatement(place, false); if (statement == null) { throw new RuntimeExceptionWithAttachments( "Cannot find parent statement for " + place.getClass() + "; parent type is " + place.getParent().getClass(), new Attachment("context.txt", myContainer.getText())); } myAnchor = statement; } } private void highlight(PsiVariable var) { if (myEditor != null) { PsiElement[] occurrences = PsiTreeUtil.collectElements(myContainer, e -> e instanceof PsiReference && ((PsiReference)e).isReferenceTo(var)); IntroduceVariableBase.highlightReplacedOccurrences(myProject, myEditor, occurrences); } } private void replaceOccurrences(PsiExpression newExpr) { assert myAnchor.isValid(); PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(myProject); PsiExpression ref = elementFactory.createExpressionFromText(mySettings.getEnteredName(), null); boolean needReplaceSelf = myReplaceSelf; if (mySettings.isReplaceAllOccurrences()) { for (PsiExpression occurrence : myOccurrences) { PsiExpression correctedOccurrence = occurrence.equals(myExpression) ? 
newExpr : occurrence; correctedOccurrence = RefactoringUtil.outermostParenthesizedExpression(correctedOccurrence); if (mySettings.isReplaceLValues() || !RefactoringUtil.isAssignmentLHS(correctedOccurrence)) { PsiElement replacement = IntroduceVariableBase.replace(correctedOccurrence, ref, myProject); if (!myAnchor.isValid()) { myAnchor = replacement; } } } needReplaceSelf &= newExpr instanceof PsiPolyadicExpression && newExpr.isValid() && !newExpr.isPhysical(); } if (needReplaceSelf) { PsiElement replacement = IntroduceVariableBase.replace(newExpr, ref, myProject); if (!myAnchor.isValid()) { myAnchor = replacement; } } } private PsiVariable addVariable(PsiElement declaration, PsiExpression initializer) { declaration = addDeclaration(declaration, initializer, myAnchor); declaration = JavaCodeStyleManager.getInstance(myProject).shortenClassReferences(declaration); return (PsiVariable)(declaration instanceof PsiDeclarationStatement ? ((PsiDeclarationStatement)declaration).getDeclaredElements()[0] : declaration); } @NotNull private PsiElement createDeclaration(@NotNull PsiType type, @NotNull String name, PsiExpression initializer) { PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(myProject); if (myAnchor instanceof PsiInstanceOfExpression && initializer instanceof PsiTypeCastExpression) { PsiTypeElement castType = Objects.requireNonNull(((PsiTypeCastExpression)initializer).getCastType()); return elementFactory.createExpressionFromText( ((PsiInstanceOfExpression)myAnchor).getOperand().getText() + " instanceof " + castType.getText() + " " + name, myContainer); } if (myContainer instanceof PsiClass) { PsiField declaration = elementFactory.createField(name, type); declaration.setInitializer(initializer); return declaration; } return elementFactory.createVariableDeclarationStatement(name, type, initializer, myContainer); } private static PsiElement addDeclaration(PsiElement declaration, PsiExpression initializer, @NotNull PsiElement anchor) { if (anchor 
instanceof PsiDeclarationStatement) { final PsiElement[] declaredElements = ((PsiDeclarationStatement)anchor).getDeclaredElements(); if (declaredElements.length > 1) { final int[] usedFirstVar = new int[]{-1}; initializer.accept(new JavaRecursiveElementWalkingVisitor() { @Override public void visitReferenceExpression(PsiReferenceExpression expression) { final int i = ArrayUtilRt.find(declaredElements, expression.resolve()); if (i > -1) { usedFirstVar[0] = Math.max(i, usedFirstVar[0]); } super.visitReferenceExpression(expression); } }); if (usedFirstVar[0] > -1) { final PsiVariable psiVariable = (PsiVariable)declaredElements[usedFirstVar[0]]; psiVariable.normalizeDeclaration(); final PsiDeclarationStatement parDeclarationStatement = PsiTreeUtil.getParentOfType(psiVariable, PsiDeclarationStatement.class); return anchor.getParent().addAfter(declaration, parDeclarationStatement); } } } if (anchor instanceof PsiInstanceOfExpression && declaration instanceof PsiInstanceOfExpression) { PsiInstanceOfExpression newInstanceOf = (PsiInstanceOfExpression)anchor.replace(declaration); return ((PsiTypeTestPattern)Objects.requireNonNull(newInstanceOf.getPattern())).getPatternVariable(); } if (anchor instanceof PsiResourceListElement) { PsiDeclarationStatement declarationStatement = (PsiDeclarationStatement)declaration; PsiLocalVariable localVariable = (PsiLocalVariable)declarationStatement.getDeclaredElements()[0]; PsiResourceVariable resourceVariable = JavaPsiFacade.getElementFactory(anchor.getProject()) .createResourceVariable(localVariable.getName(), localVariable.getType(), initializer, anchor); return anchor.replace(resourceVariable); } PsiElement parent = anchor.getParent(); if (parent == null) { throw new IllegalStateException("Unexpectedly anchor has no parent. 
Anchor class: " + anchor.getClass()); } return parent.addBefore(declaration, anchor); } @NotNull private static PsiType stripNullabilityAnnotationsFromTargetType(SmartTypePointer selectedType, final PsiExpression expression) { PsiType type = selectedType.getType(); if (type == null) { throw new IncorrectOperationException("Unexpected empty type pointer"); } PsiDeclarationStatement probe = JavaPsiFacade.getElementFactory(expression.getProject()) .createVariableDeclarationStatement("x", TypeUtils.getObjectType(expression), null, expression); Project project = expression.getProject(); NullabilityAnnotationInfo nullabilityAnnotationInfo = NullableNotNullManager.getInstance(project).findExplicitNullability((PsiLocalVariable)probe.getDeclaredElements()[0]); final PsiAnnotation[] annotations = type.getAnnotations(); return type.annotate(new TypeAnnotationProvider() { @Override public PsiAnnotation @NotNull [] getAnnotations() { final NullableNotNullManager manager = NullableNotNullManager.getInstance(project); final Set<String> nullables = new HashSet<>(); Nullability nullability = nullabilityAnnotationInfo != null ? 
nullabilityAnnotationInfo.getNullability() : Nullability.UNKNOWN; if (nullability == Nullability.UNKNOWN) { nullables.addAll(manager.getNotNulls()); nullables.addAll(manager.getNullables()); } else if (nullability == Nullability.NOT_NULL) { nullables.addAll(manager.getNotNulls()); } else if (nullability == Nullability.NULLABLE){ nullables.addAll(manager.getNullables()); } return Arrays.stream(annotations) .filter(annotation -> !nullables.contains(annotation.getQualifiedName())) .toArray(PsiAnnotation[]::new); } }); } @NotNull private static PsiElement correctAnchor(PsiExpression expr, @NotNull PsiElement anchor, PsiExpression[] occurrences) { if (!expr.isPhysical()) { expr = ObjectUtils.tryCast(expr.getUserData(ElementToWorkOn.PARENT), PsiExpression.class); if (expr == null) return anchor; } if (anchor instanceof PsiSwitchLabelStatementBase) { PsiSwitchBlock block = ((PsiSwitchLabelStatementBase)anchor).getEnclosingSwitchBlock(); if (block == null) return anchor; anchor = block; if (anchor instanceof PsiExpression) { expr = (PsiExpression)anchor; } } Set<PsiExpression> allOccurrences = StreamEx.of(occurrences).filter(PsiElement::isPhysical).append(expr).toSet(); PsiExpression firstOccurrence = Collections.min(allOccurrences, Comparator.comparing(e -> e.getTextRange().getStartOffset())); if (HighlightUtil.Feature.PATTERNS.isAvailable(anchor)) { PsiTypeCastExpression cast = ObjectUtils.tryCast(PsiUtil.skipParenthesizedExprDown(firstOccurrence), PsiTypeCastExpression.class); if (cast != null && !(cast.getType() instanceof PsiPrimitiveType) && !(PsiUtil.skipParenthesizedExprUp(firstOccurrence.getParent()) instanceof PsiExpressionStatement)) { PsiInstanceOfExpression candidate = InstanceOfUtils.findPatternCandidate(cast); if (candidate != null && allOccurrences.stream() .map(occ -> ObjectUtils.tryCast(PsiUtil.skipParenthesizedExprDown(firstOccurrence), PsiTypeCastExpression.class)) .allMatch(occ -> occ != null && (occ == firstOccurrence || 
InstanceOfUtils.findPatternCandidate(occ) == candidate))) { return candidate; } } } if (anchor instanceof PsiWhileStatement) { PsiWhileStatement whileStatement = (PsiWhileStatement)anchor; PsiExpression condition = whileStatement.getCondition(); if (condition != null && allOccurrences.stream().allMatch(occurrence -> PsiTreeUtil.isAncestor(whileStatement, occurrence, true))) { if (firstOccurrence != null && PsiTreeUtil.isAncestor(condition, firstOccurrence, false)) { PsiPolyadicExpression polyadic = ObjectUtils.tryCast(PsiUtil.skipParenthesizedExprDown(condition), PsiPolyadicExpression.class); if (polyadic != null && JavaTokenType.ANDAND.equals(polyadic.getOperationTokenType())) { PsiExpression operand = ContainerUtil.find(polyadic.getOperands(), op -> PsiTreeUtil.isAncestor(op, firstOccurrence, false)); operand = PsiUtil.skipParenthesizedExprDown(operand); LOG.assertTrue(operand != null); return operand; } return condition; } } } if (firstOccurrence != null && ControlFlowUtils.canExtractStatement(firstOccurrence) && !PsiUtil.isAccessedForWriting(firstOccurrence)) { PsiExpression ancestorCandidate = ExpressionUtils.getTopLevelExpression(firstOccurrence); if (PsiTreeUtil.isAncestor(anchor, ancestorCandidate, false)) { PsiElement statement = RefactoringUtil.getParentStatement(ancestorCandidate, false); if (allOccurrences.stream().allMatch(occurrence -> PsiTreeUtil.isAncestor(statement, occurrence, false) && (!PsiTreeUtil.isAncestor(ancestorCandidate, occurrence, false) || ReorderingUtils.canExtract(ancestorCandidate, occurrence) == ThreeState.NO))) { return firstOccurrence; } } } if (anchor instanceof PsiTryStatement && firstOccurrence != null) { PsiResourceList resourceList = ((PsiTryStatement)anchor).getResourceList(); PsiElement parent = firstOccurrence.getParent(); if (resourceList != null && parent instanceof PsiResourceExpression && parent.getParent() == resourceList && InheritanceUtil.isInheritor(firstOccurrence.getType(), 
CommonClassNames.JAVA_LANG_AUTO_CLOSEABLE)) { return parent; } } if (anchor.getParent() instanceof PsiSwitchLabeledRuleStatement) { return ExpressionUtils.getTopLevelExpression(expr); } if (RefactoringUtil.isLoopOrIf(anchor.getParent())) return anchor; PsiElement child = locateAnchor(anchor); if (IntroduceVariableBase.isFinalVariableOnLHS(expr)) { child = child.getNextSibling(); } return child == null ? anchor : child; } private static PsiElement locateAnchor(PsiElement child) { while (child != null) { PsiElement prev = child.getPrevSibling(); if (prev instanceof PsiStatement) break; if (PsiUtil.isJavaToken(prev, JavaTokenType.LBRACE)) break; child = prev; } while (child instanceof PsiWhiteSpace || child instanceof PsiComment) { child = child.getNextSibling(); } return child; } @Nullable public static PsiVariable introduce(final Project project, final PsiExpression expr, final Editor editor, final PsiElement anchorStatement, final PsiExpression[] occurrences, final IntroduceVariableSettings settings) { Computable<SmartPsiElementPointer<PsiVariable>> computation = new VariableExtractor(project, expr, editor, anchorStatement, occurrences, settings)::extractVariable; SmartPsiElementPointer<PsiVariable> pointer = ApplicationManager.getApplication().runWriteAction(computation); return pointer != null ? pointer.getElement() : null; } }
/*
 * Copyright (c) 2012 Jason Polites
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.polites.android;

import java.io.InputStream;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

import android.content.Context;
import android.content.res.Configuration;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.ColorFilter;
import android.graphics.Matrix;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.provider.MediaStore;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.widget.ImageView;

/**
 * An ImageView that draws its drawable itself (bypassing the superclass drawing) so it can
 * apply pinch/drag gesture transforms: translation (x, y), scale and rotation. Gesture
 * handling is delegated to a {@code GestureImageViewTouchListener} installed in
 * {@link #setupCanvas}; animations run on a background {@code Animator} thread started in
 * {@link #onAttachedToWindow}.
 */
public class GestureImageView extends ImageView {

	public static final String GLOBAL_NS = "http://schemas.android.com/apk/res/android";
	public static final String LOCAL_NS = "http://schemas.polites.com/android";

	// Starts with 0 permits: onDraw releases a permit, waitForDraw acquires one.
	private final Semaphore drawLock = new Semaphore(0);
	private Animator animator;

	private Drawable drawable;

	// Current translation of the drawable's center, in view coordinates.
	private float x = 0, y = 0;

	// True once setupCanvas has run for the current orientation/layout.
	private boolean layout = false;

	// scaleAdjust is the user/gesture scale; effective draw scale is scale * scaleAdjust.
	private float scaleAdjust = 1.0f;
	// <= 0 means "not set": computed from the scale type in setupCanvas.
	private float startingScale = -1.0f;

	private float scale = 1.0f;
	private float maxScale = 5.0f;
	private float minScale = 0.75f;
	// Scale factors that exactly fit the image to the measured width/height.
	private float fitScaleHorizontal = 1.0f;
	private float fitScaleVertical = 1.0f;
	private float rotation = 0.0f;

	private float centerX;
	private float centerY;

	// Optional initial position from XML attributes; null means "center".
	private Float startX, startY;

	// Half image width/height, used as drawable bounds around the origin.
	private int hWidth;
	private int hHeight;

	private int resId = -1;
	private boolean recycle = false;
	// When true, most inherited ImageView mutators throw UnsupportedOperationException.
	private boolean strict = false;

	private int displayHeight;
	private int displayWidth;

	private int alpha = 255;
	private ColorFilter colorFilter;

	private int deviceOrientation = -1;
	private int imageOrientation;

	private GestureImageViewListener gestureImageViewListener;
	private GestureImageViewTouchListener gestureImageViewTouchListener;

	private OnTouchListener customOnTouchListener;
	private OnClickListener onClickListener;

	// NOTE(review): defStyle is silently dropped by delegating to the 2-arg constructor —
	// confirm this is intentional before relying on styled attributes.
	public GestureImageView(Context context, AttributeSet attrs, int defStyle) {
		this(context, attrs);
	}

	public GestureImageView(Context context, AttributeSet attrs) {
		super(context, attrs);

		// Default to CENTER_INSIDE unless the XML explicitly sets a scaleType.
		String scaleType = attrs.getAttributeValue(GLOBAL_NS, "scaleType");

		if(scaleType == null || scaleType.trim().length() == 0) {
			setScaleType(ScaleType.CENTER_INSIDE);
		}

		String strStartX = attrs.getAttributeValue(LOCAL_NS, "start-x");
		String strStartY = attrs.getAttributeValue(LOCAL_NS, "start-y");

		if(strStartX != null && strStartX.trim().length() > 0) {
			startX = Float.parseFloat(strStartX);
		}

		if(strStartY != null && strStartY.trim().length() > 0) {
			startY = Float.parseFloat(strStartY);
		}

		setStartingScale(attrs.getAttributeFloatValue(LOCAL_NS, "start-scale", startingScale));
		setMinScale(attrs.getAttributeFloatValue(LOCAL_NS, "min-scale", minScale));
		setMaxScale(attrs.getAttributeFloatValue(LOCAL_NS, "max-scale", maxScale));
		setStrict(attrs.getAttributeBooleanValue(LOCAL_NS, "strict", strict));
		setRecycle(attrs.getAttributeBooleanValue(LOCAL_NS, "recycle", recycle));

		initImage();
	}

	public GestureImageView(Context context) {
		super(context);
		setScaleType(ScaleType.CENTER_INSIDE);
		initImage();
	}

	/**
	 * Measures the view; when one dimension is WRAP_CONTENT, derives it from the other
	 * using the image's aspect ratio (driven by the current device orientation).
	 */
	@Override
	protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
		if(drawable != null) {
			int orientation = getResources().getConfiguration().orientation;
			if(orientation == Configuration.ORIENTATION_LANDSCAPE) {
				displayHeight = MeasureSpec.getSize(heightMeasureSpec);

				if(getLayoutParams().width == LayoutParams.WRAP_CONTENT) {
					float ratio = (float) getImageWidth() / (float) getImageHeight();
					displayWidth = Math.round( (float) displayHeight * ratio) ;
				}
				else {
					displayWidth = MeasureSpec.getSize(widthMeasureSpec);
				}
			}
			else {
				displayWidth = MeasureSpec.getSize(widthMeasureSpec);

				if(getLayoutParams().height == LayoutParams.WRAP_CONTENT) {
					float ratio = (float) getImageHeight() / (float) getImageWidth();
					displayHeight = Math.round( (float) displayWidth * ratio) ;
				}
				else {
					displayHeight = MeasureSpec.getSize(heightMeasureSpec);
				}
			}
		}
		else {
			displayHeight = MeasureSpec.getSize(heightMeasureSpec);
			displayWidth = MeasureSpec.getSize(widthMeasureSpec);
		}

		setMeasuredDimension(displayWidth, displayHeight);
	}

	@Override
	protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
		super.onLayout(changed, left, top, right, bottom);
		if(changed || !layout) {
			setupCanvas(displayWidth, displayHeight, getResources().getConfiguration().orientation);
		}
	}

	/**
	 * One-time (per orientation) setup: computes fit/starting scales, initial position,
	 * drawable bounds centered on the origin, and installs the gesture touch listener.
	 */
	protected void setupCanvas(int measuredWidth, int measuredHeight, int orientation) {
		// Orientation change forces a full re-setup.
		if(deviceOrientation != orientation) {
			layout = false;
			deviceOrientation = orientation;
		}

		if(drawable != null && !layout) {
			int imageWidth = getImageWidth();
			int imageHeight = getImageHeight();

			hWidth = Math.round(((float)imageWidth / 2.0f));
			hHeight = Math.round(((float)imageHeight / 2.0f));

			// Work with the content area (measured size minus padding).
			measuredWidth -= (getPaddingLeft() + getPaddingRight());
			measuredHeight -= (getPaddingTop() + getPaddingBottom());

			computeCropScale(imageWidth, imageHeight, measuredWidth, measuredHeight);

			if(startingScale <= 0.0f) {
				computeStartingScale(imageWidth, imageHeight, measuredWidth, measuredHeight);
			}

			scaleAdjust = startingScale;

			this.centerX = (float) measuredWidth / 2.0f;
			this.centerY = (float) measuredHeight / 2.0f;

			if(startX == null) {
				x = centerX;
			}
			else {
				x = startX;
			}

			if(startY == null) {
				y = centerY;
			}
			else {
				y = startY;
			}

			gestureImageViewTouchListener = new GestureImageViewTouchListener(this, measuredWidth, measuredHeight);

			if(isLandscape()) {
				gestureImageViewTouchListener.setMinScale(minScale * fitScaleHorizontal);
			}
			else {
				gestureImageViewTouchListener.setMinScale(minScale * fitScaleVertical);
			}

			gestureImageViewTouchListener.setMaxScale(maxScale * startingScale);

			gestureImageViewTouchListener.setFitScaleHorizontal(fitScaleHorizontal);
			gestureImageViewTouchListener.setFitScaleVertical(fitScaleVertical);
			gestureImageViewTouchListener.setCanvasWidth(measuredWidth);
			gestureImageViewTouchListener.setCanvasHeight(measuredHeight);
			gestureImageViewTouchListener.setOnClickListener(onClickListener);

			// Bounds centered on the origin; onDraw translates/scales the canvas instead.
			drawable.setBounds(-hWidth,-hHeight,hWidth,hHeight);

			// Chain any externally-supplied touch listener before the gesture listener.
			super.setOnTouchListener(new OnTouchListener() {
				@Override
				public boolean onTouch(View v, MotionEvent event) {
					if(customOnTouchListener != null) {
						customOnTouchListener.onTouch(v, event);
					}
					return gestureImageViewTouchListener.onTouch(v, event);
				}
			});

			layout = true;
		}
	}

	/** Computes the scale factors that fit the image exactly to the measured dimensions. */
	protected void computeCropScale(int imageWidth, int imageHeight, int measuredWidth, int measuredHeight) {
		fitScaleHorizontal = (float) measuredWidth / (float) imageWidth;
		fitScaleVertical = (float) measuredHeight / (float) imageHeight;
	}

	/** Derives the starting scale from the configured ScaleType when none was set explicitly. */
	protected void computeStartingScale(int imageWidth, int imageHeight, int measuredWidth, int measuredHeight) {
		switch(getScaleType()) {
			case CENTER:
				// Center the image in the view, but perform no scaling.
				startingScale = 1.0f;
				break;

			case CENTER_CROP:
				startingScale = Math.max((float) measuredHeight / (float) imageHeight, (float) measuredWidth/ (float) imageWidth);
				break;

			case CENTER_INSIDE:
				if(isLandscape()) {
					startingScale = fitScaleHorizontal;
				}
				else {
					startingScale = fitScaleVertical;
				}
				break;
		}
	}

	/** True if the backing bitmap (when the drawable is a BitmapDrawable) has been recycled. */
	protected boolean isRecycled() {
		if(drawable != null && drawable instanceof BitmapDrawable) {
			Bitmap bitmap = ((BitmapDrawable)drawable).getBitmap();
			if(bitmap != null) {
				return bitmap.isRecycled();
			}
		}
		return false;
	}

	/** Recycles the backing bitmap, but only when the "recycle" option is enabled. */
	protected void recycle() {
		if(recycle && drawable != null && drawable instanceof BitmapDrawable) {
			Bitmap bitmap = ((BitmapDrawable)drawable).getBitmap();
			if(bitmap != null) {
				bitmap.recycle();
			}
		}
	}

	/**
	 * Draws the drawable with the current translate/rotate/scale transform, then releases
	 * a draw permit so waitForDraw callers are unblocked.
	 */
	@Override
	protected void onDraw(Canvas canvas) {
		if(layout) {
			if(drawable != null && !isRecycled()) {
				canvas.save();

				float adjustedScale = scale * scaleAdjust;

				canvas.translate(x, y);

				if(rotation != 0.0f) {
					canvas.rotate(rotation);
				}

				if(adjustedScale != 1.0f) {
					canvas.scale(adjustedScale, adjustedScale);
				}

				drawable.draw(canvas);

				canvas.restore();
			}

			// Cap the permit count at 1 so repeated draws don't accumulate permits.
			if(drawLock.availablePermits() <= 0) {
				drawLock.release();
			}
		}
	}

	/**
	 * Waits for a draw
	 * @param timeout max time to wait for draw (ms)
	 * @return true if a draw occurred before the timeout elapsed
	 * @throws InterruptedException
	 */
	public boolean waitForDraw(long timeout) throws InterruptedException {
		return drawLock.tryAcquire(timeout, TimeUnit.MILLISECONDS);
	}

	// Starts the animation worker thread; loads a deferred image resource if one was set
	// while detached.
	@Override
	protected void onAttachedToWindow() {
		animator = new Animator(this, "GestureImageViewAnimator");
		animator.start();

		if(resId >= 0 && drawable == null) {
			setImageResource(resId);
		}

		super.onAttachedToWindow();
	}

	public void animationStart(Animation animation) {
		if(animator != null) {
			animator.play(animation);
		}
	}

	public void animationStop() {
		if(animator != null) {
			animator.cancel();
		}
	}

	// Stops the animator and (optionally) recycles the bitmap when detached.
	@Override
	protected void onDetachedFromWindow() {
		if(animator != null) {
			animator.finish();
		}
		if(recycle && drawable != null && !isRecycled()) {
			recycle();
			drawable = null;
		}
		super.onDetachedFromWindow();
	}

	/** Applies the cached alpha/filter settings to the drawable and schedules a layout pass. */
	protected void initImage() {
		if(this.drawable != null) {
			this.drawable.setAlpha(alpha);
			this.drawable.setFilterBitmap(true);
			if(colorFilter != null) {
				this.drawable.setColorFilter(colorFilter);
			}
		}

		if(!layout) {
			requestLayout();
			redraw();
		}
	}

	public void setImageBitmap(Bitmap image) {
		this.drawable = new BitmapDrawable(getResources(), image);
		initImage();
	}

	@Override
	public void setImageDrawable(Drawable drawable) {
		this.drawable = drawable;
		initImage();
	}

	public void setImageResource(int id) {
		if(this.drawable != null) {
			this.recycle();
		}
		if(id >= 0) {
			this.resId = id;
			setImageDrawable(getContext().getResources().getDrawable(id));
		}
	}

	public int getScaledWidth() {
		return Math.round(getImageWidth() * getScale());
	}

	public int getScaledHeight() {
		return Math.round(getImageHeight() * getScale());
	}

	public int getImageWidth() {
		if(drawable != null) {
			return drawable.getIntrinsicWidth();
		}
		return 0;
	}

	public int getImageHeight() {
		if(drawable != null) {
			return drawable.getIntrinsicHeight();
		}
		return 0;
	}

	/** Moves the image by the given delta (does not trigger a redraw itself). */
	public void moveBy(float x, float y) {
		this.x += x;
		this.y += y;
	}

	public void setPosition(float x, float y) {
		this.x = x;
		this.y = y;
	}

	public void redraw() {
		postInvalidate();
	}

	public void setMinScale(float min) {
		this.minScale = min;
		if(gestureImageViewTouchListener != null) {
			gestureImageViewTouchListener.setMinScale(min * fitScaleHorizontal);
		}
	}

	public void setMaxScale(float max) {
		this.maxScale = max;
		if(gestureImageViewTouchListener != null) {
			gestureImageViewTouchListener.setMaxScale(max * startingScale);
		}
	}

	public void setScale(float scale) {
		scaleAdjust = scale;
	}

	public float getScale() {
		return scaleAdjust;
	}

	public float getImageX() {
		return x;
	}

	public float getImageY() {
		return y;
	}

	public boolean isStrict() {
		return strict;
	}

	public void setStrict(boolean strict) {
		this.strict = strict;
	}

	public boolean isRecycle() {
		return recycle;
	}

	public void setRecycle(boolean recycle) {
		this.recycle = recycle;
	}

	/** Restores the image to its initial center position and starting scale. */
	public void reset() {
		x = centerX;
		y = centerY;
		scaleAdjust = startingScale;
		if (gestureImageViewTouchListener != null) {
			gestureImageViewTouchListener.reset();
		}
		redraw();
	}

	public void setRotation(float rotation) {
		this.rotation = rotation;
	}

	public void setGestureImageViewListener(GestureImageViewListener pinchImageViewListener) {
		this.gestureImageViewListener = pinchImageViewListener;
	}

	public GestureImageViewListener getGestureImageViewListener() {
		return gestureImageViewListener;
	}

	@Override
	public Drawable getDrawable() {
		return drawable;
	}

	@Override
	public void setAlpha(int alpha) {
		this.alpha = alpha;
		if(drawable != null) {
			drawable.setAlpha(alpha);
		}
	}

	@Override
	public void setColorFilter(ColorFilter cf) {
		this.colorFilter = cf;
		if(drawable != null) {
			drawable.setColorFilter(cf);
		}
	}

	/**
	 * Loads an image from a content:// URI (honoring the MediaStore orientation hint by
	 * rotating the decoded bitmap) or from a file path for any other scheme.
	 */
	@Override
	public void setImageURI(Uri mUri) {
		if ("content".equals(mUri.getScheme())) {
			try {
				String[] orientationColumn = {MediaStore.Images.Media.ORIENTATION};

				Cursor cur = getContext().getContentResolver().query(mUri, orientationColumn, null, null, null);

				// NOTE(review): if getInt/getColumnIndex throws here, "cur" is only closed by
				// the inner finally if we reach it — a throw before the inner try leaks the
				// cursor into the outer catch. Consider widening the try around the query.
				if (cur != null && cur.moveToFirst()) {
					imageOrientation = cur.getInt(cur.getColumnIndex(orientationColumn[0]));
				}

				InputStream in = null;

				try {
					in = getContext().getContentResolver().openInputStream(mUri);
					Bitmap bmp = BitmapFactory.decodeStream(in);

					if(imageOrientation != 0) {
						Matrix m = new Matrix();
						m.postRotate(imageOrientation);
						Bitmap rotated = Bitmap.createBitmap(bmp, 0, 0, bmp.getWidth(), bmp.getHeight(), m, true);
						bmp.recycle();
						setImageDrawable(new BitmapDrawable(getResources(), rotated));
					}
					else {
						setImageDrawable(new BitmapDrawable(getResources(), bmp));
					}
				}
				finally {
					if(in != null) {
						in.close();
					}

					if(cur != null) {
						cur.close();
					}
				}
			}
			catch (Exception e) {
				Log.w("GestureImageView", "Unable to open content: " + mUri, e);
			}
		}
		else {
			setImageDrawable(Drawable.createFromPath(mUri.toString()));
		}

		if (drawable == null) {
			Log.e("GestureImageView", "resolveUri failed on bad bitmap uri: " + mUri);
			// Don't try again.
			mUri = null;
		}
	}

	@Override
	public Matrix getImageMatrix() {
		if(strict) {
			throw new UnsupportedOperationException("Not supported");
		}
		return super.getImageMatrix();
	}

	// Only the three "center" scale types are meaningful for this view; others are
	// ignored (or rejected in strict mode).
	@Override
	public void setScaleType(ScaleType scaleType) {
		if(scaleType == ScaleType.CENTER ||
			scaleType == ScaleType.CENTER_CROP ||
			scaleType == ScaleType.CENTER_INSIDE) {
			super.setScaleType(scaleType);
		}
		else if(strict) {
			throw new UnsupportedOperationException("Not supported");
		}
	}

	@Override
	public void invalidateDrawable(Drawable dr) {
		if(strict) {
			throw new UnsupportedOperationException("Not supported");
		}
		super.invalidateDrawable(dr);
	}

	@Override
	public int[] onCreateDrawableState(int extraSpace) {
		if(strict) {
			throw new UnsupportedOperationException("Not supported");
		}
		return super.onCreateDrawableState(extraSpace);
	}

	@Override
	public void setAdjustViewBounds(boolean adjustViewBounds) {
		if(strict) {
			throw new UnsupportedOperationException("Not supported");
		}
		super.setAdjustViewBounds(adjustViewBounds);
	}

	@Override
	public void setImageLevel(int level) {
		if(strict) {
			throw new UnsupportedOperationException("Not supported");
		}
		super.setImageLevel(level);
	}

	// Matrix-based positioning is replaced by this view's own transform: no-op unless strict.
	@Override
	public void setImageMatrix(Matrix matrix) {
		if(strict) {
			throw new UnsupportedOperationException("Not supported");
		}
	}

	@Override
	public void setImageState(int[] state, boolean merge) {
		if(strict) {
			throw new UnsupportedOperationException("Not supported");
		}
	}

	@Override
	public void setSelected(boolean selected) {
		if(strict) {
			throw new UnsupportedOperationException("Not supported");
		}
		super.setSelected(selected);
	}

	// External touch listeners are chained in front of the gesture listener (see setupCanvas).
	@Override
	public void setOnTouchListener(OnTouchListener l) {
		this.customOnTouchListener = l;
	}

	public float getCenterX() {
		return centerX;
	}

	public float getCenterY() {
		return centerY;
	}

	public boolean isLandscape() {
		return getImageWidth() >= getImageHeight();
	}

	public boolean isPortrait() {
		return getImageWidth() <= getImageHeight();
	}

	public void setStartingScale(float startingScale) {
		this.startingScale = startingScale;
	}

	public void setStartingPosition(float x, float y) {
		this.startX = x;
		this.startY = y;
	}

	@Override
	public void setOnClickListener(OnClickListener l) {
		this.onClickListener = l;

		if(gestureImageViewTouchListener != null) {
			gestureImageViewTouchListener.setOnClickListener(l);
		}
	}

	/**
	 * Returns true if the image dimensions are aligned with the orientation of the device.
	 * @return
	 */
	public boolean isOrientationAligned() {
		if(deviceOrientation == Configuration.ORIENTATION_LANDSCAPE) {
			return isLandscape();
		}
		else if(deviceOrientation == Configuration.ORIENTATION_PORTRAIT) {
			return isPortrait();
		}
		return true;
	}

	public int getDeviceOrientation() {
		return deviceOrientation;
	}
}
/* * The MIT License (MIT) * * Copyright (c) 2007-2015 Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.broad.igv.util; import biz.source_code.base64Coder.Base64Coder; import htsjdk.samtools.seekablestream.SeekableStream; import htsjdk.samtools.util.ftp.FTPClient; import htsjdk.samtools.util.ftp.FTPStream; import org.apache.log4j.Logger; import org.apache.tomcat.util.HttpDate; import org.broad.igv.Globals; import org.broad.igv.exceptions.HttpResponseException; import org.broad.igv.google.GoogleUtils; import org.broad.igv.google.OAuthUtils; import org.broad.igv.prefs.IGVPreferences; import org.broad.igv.prefs.PreferencesManager; import org.broad.igv.ui.IGV; import org.broad.igv.ui.util.MessageUtils; import org.broad.igv.util.collections.CI; import org.broad.igv.util.ftp.FTPUtils; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.*; import java.net.*; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.time.ZonedDateTime; import java.time.format.DateTimeParseException; import java.util.List; import java.util.*; import java.util.zip.GZIPInputStream; import static org.broad.igv.prefs.Constants.*; import static org.broad.igv.util.stream.SeekableServiceStream.WEBSERVICE_URL; /** * Wrapper utility class... for interacting with HttpURLConnection. 
* * @author Jim Robinson * @date 9/22/11 */ public class HttpUtils { private static Logger log = Logger.getLogger(HttpUtils.class); private static HttpUtils instance; private Map<String, Boolean> byteRangeTestMap; private ProxySettings proxySettings = null; private final int MAX_REDIRECTS = 5; private String defaultUserName = null; private char[] defaultPassword = null; private Map<String, Collection<String>> headerMap = new HashMap<>(); // static provided to support unit testing private static boolean BYTE_RANGE_DISABLED = false; private Map<URL, Boolean> headURLCache = new HashMap<URL, Boolean>(); private class CachedRedirect { private URL url = null; private ZonedDateTime expires = null; } // remember HTTP redirects private final int DEFAULT_REDIRECT_EXPIRATION_MIN = 15; private Map<URL, CachedRedirect> redirectCache = new HashMap<URL, CachedRedirect>(); /** * @return the single instance */ public static HttpUtils getInstance() { if (instance == null) { instance = new HttpUtils(); } return instance; } private HttpUtils() { disableCertificateValidation(); Authenticator.setDefault(new IGVAuthenticator()); try { System.setProperty("java.net.useSystemProxies", "true"); } catch (Exception e) { log.info("Couldn't set useSystemProxies=true"); } byteRangeTestMap = Collections.synchronizedMap(new HashMap()); } /** * Create a URL from the given string. 
Performs various mappings for google buckets, amazon cNames, and * http -> https conversions * * @param urlString * @return * @throws MalformedURLException */ public static URL createURL(String urlString) throws MalformedURLException { urlString = mapURL(urlString.trim()); return new URL(urlString); } public static String mapURL(String urlString) throws MalformedURLException { if (urlString.startsWith("gs://")) { urlString = GoogleUtils.translateGoogleCloudURL(urlString); } else if (AmazonUtils.isAwsS3Path(urlString)) { try { urlString = AmazonUtils.translateAmazonCloudURL(urlString); } catch (IOException e) { log.error(e); } } if (GoogleUtils.isGoogleCloud(urlString)) { if (urlString.indexOf("alt=media") < 0) { urlString = URLUtils.addParameter(urlString, "alt=media"); } } String host = URLUtils.getHost(urlString); if (host.equals("igv.broadinstitute.org")) { urlString = urlString.replace("igv.broadinstitute.org", "s3.amazonaws.com/igv.broadinstitute.org"); } else if (host.equals("igvdata.broadinstitute.org")) { urlString = urlString.replace("igvdata.broadinstitute.org", "s3.amazonaws.com/igv.broadinstitute.org"); } else if (host.equals("dn7ywbm9isq8j.cloudfront.net")) { urlString = urlString.replace("dn7ywbm9isq8j.cloudfront.net", "s3.amazonaws.com/igv.broadinstitute.org"); } else if (host.equals("www.broadinstitute.org")) { urlString = urlString.replace("www.broadinstitute.org/igvdata", "data.broadinstitute.org/igvdata"); } else if (host.equals("www.dropbox.com")) { urlString = urlString.replace("//www.dropbox.com", "//dl.dropboxusercontent.com"); } else if (host.equals("drive.google.com")) { urlString = GoogleUtils.driveDownloadURL(urlString); } // data.broadinstitute.org requires https urlString = urlString.replace("http://data.broadinstitute.org", "https://data.broadinstitute.org"); return urlString; } public static boolean isRemoteURL(String string) { return FileUtils.isRemote(string); } /** * Provided to support unit testing (force disable byte range 
requests) * * @return */ public static void disableByteRange(boolean b) { BYTE_RANGE_DISABLED = b; } /** * Return the contents of the url as a String. This method should only be used for queries expected to return * a small amount of data. * * @param url * @return * @throws IOException */ public String getContentsAsString(URL url) throws IOException { return getContentsAsString(url, null); } public String getContentsAsString(URL url, Map<String, String> headers) throws IOException { InputStream is = null; HttpURLConnection conn = openConnection(url, headers); try { is = conn.getInputStream(); return readContents(is); } catch (IOException e) { readErrorStream(conn); // Consume content throw e; } finally { if (is != null) is.close(); } } public String getContentsAsGzippedString(URL url) throws IOException { InputStream is = null; HttpURLConnection conn = openConnection(url, null); try { is = conn.getInputStream(); return readContents(new GZIPInputStream(is)); } catch (IOException e) { readErrorStream(conn); // Consume content throw e; } finally { if (is != null) is.close(); } } public String getContentsAsJSON(URL url) throws IOException { InputStream is = null; Map<String, String> reqProperties = new HashMap(); reqProperties.put("Accept", "application/json,text/plain"); HttpURLConnection conn = openConnection(url, reqProperties); try { is = conn.getInputStream(); return readContents(is); } catch (IOException e) { readErrorStream(conn); // Consume content throw e; } finally { if (is != null) is.close(); } } public String doPost(URL url, Map<String, String> params) throws IOException { StringBuilder postData = new StringBuilder(); for (Map.Entry<String, String> param : params.entrySet()) { if (postData.length() != 0) postData.append('&'); postData.append(param.getKey()); postData.append('='); postData.append(param.getValue()); } byte[] postDataBytes = postData.toString().getBytes(); log.debug("Raw POST request: " + postData.toString()); HttpURLConnection conn = 
(HttpURLConnection) url.openConnection(); conn.setRequestMethod("POST"); conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded"); conn.setDoOutput(true); conn.getOutputStream().write(postDataBytes); StringBuilder response = new StringBuilder(); Reader in = new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8")); for (int c; (c = in.read()) >= 0; ) { response.append((char) c); } in.close(); return response.toString(); } /** * Open a connection stream for the URL. * * @param url * @return * @throws IOException */ public InputStream openConnectionStream(URL url) throws IOException { log.debug("Opening connection stream to " + url); if (url.getProtocol().toLowerCase().equals("ftp")) { String userInfo = url.getUserInfo(); String host = url.getHost(); String file = url.getPath(); FTPClient ftp = FTPUtils.connect(host, userInfo, new UserPasswordInputImpl()); ftp.pasv(); ftp.retr(file); return new FTPStream(ftp); } else { return openConnectionStream(url, null); } } public InputStream openConnectionStream(URL url, Map<String, String> requestProperties) throws IOException { HttpURLConnection conn = openConnection(url, requestProperties); if (conn == null) { return null; } if ((requestProperties != null) && requestProperties.containsKey("Range") && conn.getResponseCode() != 206) { String msg = "Warning: range requested, but response code = " + conn.getResponseCode(); log.error(msg); } try { InputStream input = conn.getInputStream(); return input; } catch (IOException e) { readErrorStream(conn); // Consume content throw e; } } public boolean resourceAvailable(String urlString) { URL url = null; try { url = HttpUtils.createURL(urlString); } catch (MalformedURLException e) { return false; } log.debug("Checking if resource is available: " + url); if (url.getProtocol().toLowerCase().equals("ftp")) { return FTPUtils.resourceAvailable(url); } else { HttpURLConnection conn = null; try { conn = openConnectionHeadOrGet(url); int code = 
conn.getResponseCode(); return code >= 200 && code < 300; } catch (Exception e) { if (conn != null) try { readErrorStream(conn); // Consume content } catch (IOException e1) { e1.printStackTrace(); } return false; } finally { if (conn != null) { try { conn.disconnect(); } catch (Exception e) { } } } } } /** * First tries a HEAD request, then a GET request if the HEAD fails. * If the GET fails, the exception is thrown * * @param url * @return * @throws IOException */ private HttpURLConnection openConnectionHeadOrGet(URL url) throws IOException { // Keep track of urls for which "HEAD" does not work (e.g. Amazon with signed urls). String urlString = url.toString(); boolean isAWS = urlString.contains("AWSAccessKeyId"); boolean tryHead = isAWS == false && (headURLCache.containsKey(url) ? headURLCache.get(url) : true); if (tryHead) { try { HttpURLConnection conn = openConnection(url, null, "HEAD"); headURLCache.put(url, true); return conn; } catch (IOException e) { if (e instanceof FileNotFoundException) { throw e; } log.debug("HEAD request failed for url: " + url.toExternalForm()); log.debug("Trying GET instead for url: " + url.toExternalForm()); headURLCache.put(url, false); } } return openConnection(url, null, "GET"); } public String getHeaderField(URL url, String key) throws IOException { HttpURLConnection conn = openConnectionHeadOrGet(url); if (conn == null) return null; return conn.getHeaderField(key); } public long getLastModified(URL url) throws IOException { HttpURLConnection conn = openConnectionHeadOrGet(url); if (conn == null) return 0; return conn.getLastModified(); } public long getContentLength(URL url) throws IOException { try { String contentLengthString = getHeaderField(url, "Content-Length"); if (contentLengthString == null) { return -1; } else { return Long.parseLong(contentLengthString); } } catch (Exception e) { log.error("Error fetching content length", e); return -1; } } /** * Compare a local and remote resource, returning true if it is believed 
that the * remote file is newer than the local file * * @param file * @param url * @return true if the files are the same or the local file is newer, false if the remote file has been modified wrt the local one. * @throws IOException */ public boolean remoteIsNewer(File file, URL url) throws IOException { if (!file.exists()) { return false; } HttpURLConnection conn = openConnection(url, null, "HEAD"); // Compare last-modified dates String lastModifiedString = conn.getHeaderField("Last-Modified"); if (lastModifiedString == null) { return true; // Assume its changed } else { HttpDate date = new HttpDate(); date.parse(lastModifiedString); long remoteModifiedTime = date.getTime(); long localModifiedTime = file.lastModified(); return remoteModifiedTime > localModifiedTime; } } public void updateProxySettings() { boolean useProxy; String proxyHost; int proxyPort = -1; boolean auth = false; String user = null; String pw = null; IGVPreferences prefMgr = PreferencesManager.getPreferences(); proxyHost = prefMgr.get(PROXY_HOST, null); try { proxyPort = Integer.parseInt(prefMgr.get(PROXY_PORT, "-1")); } catch (NumberFormatException e) { proxyPort = -1; } useProxy = prefMgr.getAsBoolean(USE_PROXY) && proxyHost != null && proxyHost.trim().length() > 0; auth = prefMgr.getAsBoolean(PROXY_AUTHENTICATE); user = prefMgr.get(PROXY_USER, null); String pwString = prefMgr.get(PROXY_PW, null); if (pwString != null) { pw = Utilities.base64Decode(pwString); } String proxyTypeString = prefMgr.get(PROXY_TYPE, "HTTP"); Proxy.Type type = Proxy.Type.valueOf(proxyTypeString.trim().toUpperCase()); String proxyWhitelistString = prefMgr.get(PROXY_WHITELIST); Set<String> whitelist = proxyWhitelistString == null ? new HashSet<String>() : new HashSet(Arrays.asList(Globals.commaPattern.split(proxyWhitelistString))); proxySettings = new ProxySettings(useProxy, user, pw, auth, proxyHost, proxyPort, type, whitelist); } /** * Get the system defined proxy defined for the URI, or null if * not available. 
May also return a {@code Proxy} object which * represents a direct connection * * @param uri * @return */ private Proxy getSystemProxy(String uri) { try { if (PreferencesManager.getPreferences().getAsBoolean("DEBUG.PROXY")) log.info("Getting system proxy for " + uri); ProxySelector selector = ProxySelector.getDefault(); List<Proxy> proxyList = selector.select(new URI(uri)); return proxyList.get(0); } catch (URISyntaxException e) { log.error(e.getMessage(), e); return null; } catch (NullPointerException e) { return null; } catch (Exception e) { log.error(e.getMessage(), e); return null; } } /** * Calls {@link #downloadFile(String, java.io.File, Frame, String)} * with {@code dialogsParent = null, title = null} * * @param url * @param outputFile * @return RunnableResult * @throws IOException */ public RunnableResult downloadFile(String url, File outputFile) throws IOException { URLDownloader downloader = downloadFile(url, outputFile, null, null); return downloader.getResult(); } /** * @param url * @param outputFile * @param dialogsParent Parent of dialog to show progress. If null, none shown * @return URLDownloader used to perform download * @throws IOException */ public URLDownloader downloadFile(String url, File outputFile, Frame dialogsParent, String dialogTitle) throws IOException { final URLDownloader urlDownloader = new URLDownloader(url, outputFile); boolean showProgressDialog = dialogsParent != null; if (!showProgressDialog) { urlDownloader.run(); return urlDownloader; } else { javax.swing.ProgressMonitor monitor = new javax.swing.ProgressMonitor(IGV.getInstance().getMainPanel(), "Downloading " + outputFile.getName(), "", 0, 100); urlDownloader.setMonitor(monitor); ActionListener buttonListener = new ActionListener() { @Override public void actionPerformed(ActionEvent e) { urlDownloader.cancel(true); } }; // String permText = "Downloading " + url; // String title = dialogTitle != null ? 
dialogTitle : permText; // CancellableProgressDialog dialog = CancellableProgressDialog.showCancellableProgressDialog(dialogsParent, title, buttonListener, false, monitor); // dialog.setPermText(permText); // Dimension dms = new Dimension(600, 150); // dialog.setPreferredSize(dms); // dialog.setSize(dms); // dialog.validate(); LongRunningTask.submit(urlDownloader); return urlDownloader; } } /** * Code for disabling SSL certification */ private void disableCertificateValidation() { // Create a trust manager that does not validate certificate chains TrustManager[] trustAllCerts = new TrustManager[]{ new X509TrustManager() { public java.security.cert.X509Certificate[] getAcceptedIssuers() { return null; } public void checkClientTrusted( java.security.cert.X509Certificate[] certs, String authType) { } public void checkServerTrusted( java.security.cert.X509Certificate[] certs, String authType) { } } }; // Install the all-trusting trust manager try { SSLContext sc = SSLContext.getInstance("SSL"); sc.init(null, trustAllCerts, null); HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory()); } catch (NoSuchAlgorithmException e) { } catch (KeyManagementException e) { } } private String readContents(InputStream is) throws IOException { return ParsingUtils.readContentsFromStream(is); } public String readErrorStream(HttpURLConnection connection) throws IOException { InputStream inputStream = null; try { inputStream = connection.getErrorStream(); if (inputStream == null) { return null; } return readContents(inputStream); } finally { if (inputStream != null) inputStream.close(); } } public HttpURLConnection delete(URL url) throws IOException { return openConnection(url, Collections.<String, String>emptyMap(), "DELETE"); } public HttpURLConnection openConnection(URL url, Map<String, String> requestProperties) throws IOException { return openConnection(url, requestProperties, "GET"); } private HttpURLConnection openConnection(URL url, Map<String, String> 
requestProperties, String method) throws IOException { return openConnection(url, requestProperties, method, 0, 0); } /** * The "real" connection method * * @param url * @param requestProperties * @param method * @return * @throws java.io.IOException */ private HttpURLConnection openConnection( URL url, Map<String, String> requestProperties, String method, int redirectCount, int retries) throws IOException { // if we're already seen a redirect for this URL, use the updated one if (redirectCache.containsKey(url)) { CachedRedirect cr = redirectCache.get(url); if (ZonedDateTime.now().compareTo(cr.expires) < 0.0) { // now() is before our expiration log.debug("Found URL in redirection cache: " + url + " ->" + redirectCache.get(url).url); url = cr.url; } else { log.debug("Removing expired URL from redirection cache: " + url); redirectCache.remove(url); } } // if the url points to a openid location instead of a oauth2.0 location, used the fina and replace // string to dynamically map url - dwm08 if (url.getHost().equals(GoogleUtils.GOOGLE_API_HOST) && OAuthUtils.findString != null && OAuthUtils.replaceString != null) { url = HttpUtils.createURL(url.toExternalForm().replaceFirst(OAuthUtils.findString, OAuthUtils.replaceString)); } //Encode query string portions url = StringUtils.encodeURLQueryString(url); if (log.isTraceEnabled()) { log.trace(url); } //Encode base portions. 
Right now just spaces, most common case //TODO This is a hack and doesn't work for all characters which need it if (StringUtils.countChar(url.toExternalForm(), ' ') > 0) { String newPath = url.toExternalForm().replaceAll(" ", "%20"); url = HttpUtils.createURL(newPath); } Proxy sysProxy = null; boolean igvProxySettingsExist = proxySettings != null && proxySettings.useProxy; boolean checkSystemProxy = !PreferencesManager.getPreferences().getAsBoolean("PROXY.DISABLE_CHECK") && !igvProxySettingsExist; //Only check for system proxy if igv proxy settings not found if (checkSystemProxy) { sysProxy = getSystemProxy(url.toExternalForm()); } boolean useProxy = (sysProxy != null && sysProxy.type() != Proxy.Type.DIRECT) || (igvProxySettingsExist && !proxySettings.getWhitelist().contains(url.getHost())); HttpURLConnection conn; if (useProxy) { Proxy proxy = sysProxy; if (igvProxySettingsExist) { if (proxySettings.type == Proxy.Type.DIRECT) { if (PreferencesManager.getPreferences().getAsBoolean("DEBUG.PROXY")) { log.info("NO_PROXY"); } proxy = Proxy.NO_PROXY; } else { if (PreferencesManager.getPreferences().getAsBoolean("DEBUG.PROXY")) { log.info("PROXY " + proxySettings.proxyHost + " " + proxySettings.proxyPort); } proxy = new Proxy(proxySettings.type, new InetSocketAddress(proxySettings.proxyHost, proxySettings.proxyPort)); } } conn = (HttpURLConnection) url.openConnection(proxy); if (igvProxySettingsExist && proxySettings.auth && proxySettings.user != null && proxySettings.pw != null) { byte[] bytes = (proxySettings.user + ":" + proxySettings.pw).getBytes(); String encodedUserPwd = String.valueOf(Base64Coder.encode(bytes)); conn.setRequestProperty("Proxy-Authorization", "Basic " + encodedUserPwd); } } else { if (PreferencesManager.getPreferences().getAsBoolean("DEBUG.PROXY")) { log.info("PROXY NOT USED "); if (proxySettings.getWhitelist().contains(url.getHost())) { log.info(url.getHost() + " is whitelisted"); } ; } conn = (HttpURLConnection) url.openConnection(); } if 
(!"HEAD".equals(method)) { conn.setRequestProperty("Accept", "text/plain"); } conn.setConnectTimeout(Globals.CONNECT_TIMEOUT); conn.setReadTimeout(Globals.READ_TIMEOUT); conn.setRequestMethod(method); conn.setRequestProperty("Connection", "Keep-Alive"); // we'll handle redirects manually, allowing us to cache the new URL conn.setInstanceFollowRedirects(false); if (requestProperties != null) { for (Map.Entry<String, String> prop : requestProperties.entrySet()) { conn.setRequestProperty(prop.getKey(), prop.getValue()); } } Collection<String> headers = headerMap.get(url.getHost()); if (headers != null) { for (String h : headers) { String[] kv = h.split(":"); if (kv.length == 2) { conn.setRequestProperty(kv[0], kv[1]); } } } conn.setRequestProperty("User-Agent", Globals.applicationString()); // If this is a Google URL and we have an access token use it. if (GoogleUtils.isGoogleURL(url.toExternalForm())) { String token = OAuthUtils.getInstance().getProvider().getAccessToken(); if (token != null) { conn.setRequestProperty("Authorization", "Bearer " + token); } if (GoogleUtils.getProjectID() != null && GoogleUtils.getProjectID().length() > 0) { url = addQueryParameter(url, "userProject", GoogleUtils.getProjectID()); } } if (method.equals("PUT")) { return conn; } else { int code = conn.getResponseCode(); if (!isDropboxHost(url.getHost()) && requestProperties != null && requestProperties.containsKey("Range") && code == 200 && method.equals("GET")) { log.error("Range header removed by client or ignored by server for url: " + url.toString()); if (!SwingUtilities.isEventDispatchThread()) { MessageUtils.showMessage("Warning: unsuccessful attempt to execute 'Range byte' request to host " + url.getHost()); } byteRangeTestMap.put(url.getHost(), false); String[] positionString = requestProperties.get("Range").split("=")[1].split("-"); int length = Integer.parseInt(positionString[1]) - Integer.parseInt(positionString[0]) + 1; requestProperties.remove("Range"); // < VERY IMPORTANT 
URL wsUrl = HttpUtils.createURL(WEBSERVICE_URL + "?file=" + url.toExternalForm() + "&position=" + positionString[0] + "&length=" + length); return openConnection(wsUrl, requestProperties, "GET", redirectCount, retries); } if (log.isDebugEnabled()) { //logHeaders(conn); } // Redirects. These can occur even if followRedirects == true if there is a change in protocol, // for example http -> https. if (code >= 300 && code < 400) { if (redirectCount > MAX_REDIRECTS) { throw new IOException("Too many redirects"); } CachedRedirect cr = new CachedRedirect(); cr.url = new URL(conn.getHeaderField("Location")); if (cr.url != null) { cr.expires = ZonedDateTime.now().plusMinutes(DEFAULT_REDIRECT_EXPIRATION_MIN); String s; if ((s = conn.getHeaderField("Cache-Control")) != null) { // cache-control takes priority CacheControl cc = null; try { cc = CacheControl.valueOf(s); } catch (IllegalArgumentException e) { // use default } if (cc != null) { if (cc.isNoCache()) { // set expires to null, preventing caching cr.expires = null; } else if (cc.getMaxAge() > 0) { cr.expires = ZonedDateTime.now().plusSeconds(cc.getMaxAge()); } } } else if ((s = conn.getHeaderField("Expires")) != null) { // no cache-control header, so try "expires" next try { cr.expires = ZonedDateTime.parse(s); } catch (DateTimeParseException e) { // use default } } if (cr.expires != null) { redirectCache.put(url, cr); log.debug("Redirecting to " + cr.url); return openConnection(HttpUtils.createURL(cr.url.toString()), requestProperties, method, ++redirectCount, retries); } } } // TODO -- handle other response codes. 
else if (code >= 400) { String message; // TODO -- detect Google requestor pay failure if (code == 404) { message = "File not found: " + url.toString(); throw new FileNotFoundException(message); } else if (code == 401) { if (GoogleUtils.isGoogleURL(url.toExternalForm()) && retries == 0) { GoogleUtils.checkLogin(); return openConnection(url, requestProperties, method, redirectCount, ++retries); } message = "You must log in to access this file"; throw new HttpResponseException(code, message, ""); } else if (code == 403) { message = "Access forbidden"; throw new HttpResponseException(code, message, ""); } else if (code == 416) { throw new UnsatisfiableRangeException(conn.getResponseMessage()); } else { message = conn.getResponseMessage(); String details = readErrorStream(conn); if (url.getHost().equals("www.googleapis.com") && details.contains("requester pays bucket")) { MessageUtils.showMessage("<html>" + details + "<br>Use Google menu to set project."); } throw new HttpResponseException(code, message, details); } } } return conn; } private boolean isDropboxHost(String host) { return (host.equals("dl.dropboxusercontent.com") || host.equals("www.dropbox.com")); } private URL addQueryParameter(URL url, String userProject, String projectID) { String urlString = url.toExternalForm(); urlString = urlString + (urlString.contains("?") ? 
"&" : "?") + userProject + "=" + projectID; try { return new URL(urlString); } catch (MalformedURLException e) { log.error("Error adding query parameter", e); return url; } } //Used for testing sometimes, please do not delete private void logHeaders(HttpURLConnection conn) { Map<String, List<String>> headerFields = conn.getHeaderFields(); log.debug("Headers for " + conn.getURL()); for (Map.Entry<String, List<String>> header : headerFields.entrySet()) { log.debug(header.getKey() + ": " + StringUtils.join(header.getValue(), ",")); } } public void setDefaultPassword(String defaultPassword) { this.defaultPassword = defaultPassword.toCharArray(); } public void setDefaultUserName(String defaultUserName) { this.defaultUserName = defaultUserName; } public void clearDefaultCredentials() { this.defaultPassword = null; this.defaultUserName = null; } /** * Test to see if this client can successfully retrieve a portion of a remote file using the byte-range header. * This is not a test of the server, but the client. In some environments the byte-range header gets removed * by filters after the request is made by IGV. * * @return */ public boolean useByteRange(URL url) throws IOException { if (BYTE_RANGE_DISABLED) return false; // We can test byte-range success for hosts we can reach. 
synchronized (byteRangeTestMap) { final String host = url.getHost(); if (byteRangeTestMap.containsKey(host)) { return byteRangeTestMap.get(host); } else { SeekableStream str = null; try { boolean byteRangeTestSuccess = testByteRange(url); if (byteRangeTestSuccess) { log.info("Range-byte request succeeded"); } else { log.info("Range-byte test failed -- Host: " + host + " does not support range-byte requests or there is a problem with client network environment."); } byteRangeTestMap.put(host, byteRangeTestSuccess); return byteRangeTestSuccess; } finally { if (str != null) try { str.close(); } catch (IOException e) { log.error("Error closing stream (" + url.toExternalForm() + ")", e); } } } } } public boolean testByteRange(URL url) throws IOException { Map<String, String> params = new HashMap(); String byteRange = "bytes=" + 0 + "-" + 10; params.put("Range", byteRange); HttpURLConnection conn = HttpUtils.getInstance().openConnection(url, params); int statusCode = conn.getResponseCode(); boolean byteRangeTestSuccess = (statusCode == 206); readFully(conn.getInputStream(), new byte[10]); return byteRangeTestSuccess; } /** * Add an http header string to be applied the the specified URLs. 
  Used to support command line specification
     * of authentication headers
     *
     * @param headers
     * @param urls
     */
    public void addHeaders(Collection<String> headers, List<String> urls) {
        for (String u : urls) {
            if (isRemoteURL(u)) {
                try {
                    URL url = new URL(mapURL(u));
                    headerMap.put(url.getHost(), headers);
                    // NOTE(review): System.out rather than log -- consider log.info
                    System.out.println("Added " + url.getHost() + " -> " + headers);
                } catch (MalformedURLException e) {
                    log.error("Error parsing URL " + u, e);
                }
            }
        }
    }

    /**
     * Strip the query string, if any, from the URL string.
     */
    private String stripParameters(String url) {
        int idx = url.indexOf("?");
        if (idx > 0) {
            return url.substring(0, idx);
        } else {
            return url;
        }
    }

    public void shutdown() {
        // Do any cleanup required here
    }

    /**
     * Value object holding the proxy configuration read from preferences.
     */
    public static class ProxySettings {
        boolean auth = false;
        String user;
        String pw;
        boolean useProxy;
        String proxyHost;
        int proxyPort = -1;
        Proxy.Type type;
        Set<String> whitelist;   // hosts that bypass the proxy

        public ProxySettings(boolean useProxy, String user, String pw, boolean auth, String proxyHost, int proxyPort,
                             Proxy.Type proxyType, Set<String> whitelist) {
            this.auth = auth;
            this.proxyHost = proxyHost;
            this.proxyPort = proxyPort;
            this.pw = pw;
            this.useProxy = useProxy;
            this.user = user;
            this.type = proxyType;
            this.whitelist = whitelist;
        }

        public Set<String> getWhitelist() {
            return whitelist;
        }
    }

    /**
     * The default authenticator
     */
    public class IGVAuthenticator extends Authenticator {

        Hashtable<String, PasswordAuthentication> pwCache = new Hashtable<String, PasswordAuthentication>();
        HashSet<String> cacheAttempts = new HashSet<String>();

        /**
         * Called when password authentication is needed.
         *
         * @return
         */
        @Override
        protected synchronized PasswordAuthentication getPasswordAuthentication() {

            RequestorType type = getRequestorType();
            String urlString = getRequestingURL().toString();
            boolean isProxyChallenge = type == RequestorType.PROXY;

            // Cache user entered PWs.  In normal use this shouldn't be necessary as credentials are cached upstream,
            // but if loading many files in parallel (e.g. from sessions) calls to this method can queue up before the
            // user enters their credentials, causing needless reentry.
            String pKey = type.toString() + getRequestingProtocol() + getRequestingHost();
            PasswordAuthentication pw = pwCache.get(pKey);
            if (pw != null) {
                // Prevents infinite loop if credentials are incorrect
                if (cacheAttempts.contains(urlString)) {
                    cacheAttempts.remove(urlString);
                } else {
                    cacheAttempts.add(urlString);
                    return pw;
                }
            }

            if (isProxyChallenge) {
                if (proxySettings.auth && proxySettings.user != null && proxySettings.pw != null) {
                    return new PasswordAuthentication(proxySettings.user, proxySettings.pw.toCharArray());
                }
            }

            if (defaultUserName != null && defaultPassword != null) {
                return new PasswordAuthentication(defaultUserName, defaultPassword);
            }

            Frame owner = IGV.hasInstance() ? IGV.getMainFrame() : null;

            // Prompt the user for credentials
            LoginDialog dlg = new LoginDialog(owner, urlString, isProxyChallenge);
            dlg.setVisible(true);
            if (dlg.isCanceled()) {
                return null;
            } else {
                final String userString = dlg.getUsername();
                final char[] userPass = dlg.getPassword();

                if (isProxyChallenge) {
                    proxySettings.user = userString;
                    proxySettings.pw = new String(userPass);
                }

                pw = new PasswordAuthentication(userString, userPass);
                pwCache.put(pKey, pw);
                return pw;
            }
        }
    }

    /**
     * @return true when a Range was requested but no Content-Range header came back.
     */
    static boolean isExpectedRangeMissing(URLConnection conn, Map<String, String> requestProperties) {
        final boolean rangeRequested = (requestProperties != null) &&
                (new CI.CIHashMap<String>(requestProperties)).containsKey("Range");
        if (!rangeRequested) return false;

        Map<String, List<String>> headerFields = conn.getHeaderFields();
        boolean rangeReceived = (headerFields != null) &&
                (new CI.CIHashMap<List<String>>(headerFields)).containsKey("Content-Range");
        return !rangeReceived;
    }

    /**
     * Provide override for unit tests
     */
    public void setAuthenticator(Authenticator authenticator) {
        Authenticator.setDefault(authenticator);
    }

    /**
     * For unit tests
     */
    public void resetAuthenticator() {
        Authenticator.setDefault(new IGVAuthenticator());
    }

    /**
     *
Useful helper function */ public static void readFully(InputStream is, byte b[]) throws IOException { int len = b.length; if (len < 0) { throw new IndexOutOfBoundsException(); } int n = 0; while (n < len) { int count = is.read(b, n, len - n); if (count < 0) { throw new EOFException(); } n += count; } } public class UnsatisfiableRangeException extends RuntimeException { String message; public UnsatisfiableRangeException(String message) { super(message); this.message = message; } } static class CacheControl { boolean noCache = false; long maxAge = 0; static CacheControl valueOf(String s) { CacheControl cc = new CacheControl(); String[] tokens = Globals.commaPattern.split(s); for (String t : tokens) { t = t.trim().toLowerCase(); if (t.startsWith("no-cache")) { cc.noCache = true; } else if (t.startsWith("max-age")) { String[] ma = Globals.equalPattern.split(t); cc.maxAge = Long.parseLong(ma[1].trim()); } } return cc; } private CacheControl() { } public boolean isNoCache() { return noCache; } public long getMaxAge() { return maxAge; } } }
package com.inspiron.tharun26.saaral15;

import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.util.Log;

import com.google.android.gcm.GCMBaseIntentService;

import java.util.List;

import static com.inspiron.tharun26.saaral15.CommonUtilities.SENDER_ID;
import static com.inspiron.tharun26.saaral15.CommonUtilities.displayMessage;

/**
 * GCM intent service for the Saaral 2015 app: handles GCM registration
 * lifecycle callbacks and incoming push messages, persisting each message to
 * the local database and surfacing it as a status-bar notification.
 *
 * Created by tharun26 on 25/1/15.
 */
public class GCMIntentService extends GCMBaseIntentService {

    private static final String TAG = "GCMIntentService";

    // Unused counter retained from the original source; every notification is
    // posted with id 0 (see generateNotification).
    int count = 0;

    public GCMIntentService() {
        super(SENDER_ID);
    }

    /**
     * Method called on device registered
     **/
    @Override
    protected void onRegistered(Context context, String registrationId) {
        Log.i(TAG, "Device registered: regId = " + registrationId);
        // Fixed user-visible typo: was "Your device registred with GCM".
        displayMessage(context, "Your device registered with GCM");
        ServerUtilities.register(context, MyActivity.name, MyActivity.email, registrationId);
    }

    /**
     * Method called on device un registred
     */
    @Override
    protected void onUnregistered(Context context, String registrationId) {
        Log.i(TAG, "Device unregistered");
        displayMessage(context, getString(R.string.gcm_unregistered));
        ServerUtilities.unregister(context, registrationId);
    }

    /**
     * Persists the message in the local SQLite database and logs the full
     * notification table contents (debug aid).
     */
    protected void storedb(String message) {
        DatabaseHandler db = new DatabaseHandler(this);

        Log.d("Insert: ", "Inserting ..");
        db.addNotification(new NotificationDb(message));

        Log.d("Reading: ", "Reading all contacts..");
        List<NotificationDb> contacts = db.getAllContacts();
        for (NotificationDb cn : contacts) {
            String log = "Id: " + cn.getId() + " ,Name: " + cn.getNotification();
            // Writing Contacts to log
            Log.d("Name: ", log);
        }
    }

    /**
     * Method called on Receiving a new message.  The payload is carried in the
     * "price" extra; a default greeting is substituted when it is absent.
     */
    @Override
    protected void onMessage(Context context, Intent intent) {
        Log.i(TAG, "Received message");
        String message = intent.getExtras().getString("price");
        if (message == null) {
            message = "Welcome to Saaral 2015";
        }
        storedb(message);
        displayMessage(context, message);
        // notifies user
        generateNotification(context, message);
    }

    /**
     * Method called on receiving a deleted message
     */
    @Override
    protected void onDeletedMessages(Context context, int total) {
        Log.i(TAG, "Received deleted messages notification");
        String message = getString(R.string.gcm_deleted, total);
        displayMessage(context, message);
        // notifies user
        generateNotification(context, message);
    }

    /**
     * Method called on Error
     */
    @Override
    public void onError(Context context, String errorId) {
        Log.i(TAG, "Received error: " + errorId);
        displayMessage(context, getString(R.string.gcm_error, errorId));
    }

    @Override
    protected boolean onRecoverableError(Context context, String errorId) {
        // log message
        Log.i(TAG, "Received recoverable error: " + errorId);
        displayMessage(context, getString(R.string.gcm_recoverable_error, errorId));
        return super.onRecoverableError(context, errorId);
    }

    /**
     * Issues a notification to inform the user that server has sent a message.
     *
     * NOTE(review): uses the long-deprecated Notification(int, CharSequence,
     * long) constructor and setLatestEventInfo; migrating to NotificationCompat
     * would require a support-library dependency, so the original API is kept.
     */
    private void generateNotification(Context context, String message) {
        int icon = R.drawable.ic_logo;
        long when = System.currentTimeMillis();
        NotificationManager notificationManager =
                (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
        Notification notification = new Notification(icon, message, when);

        String title = context.getString(R.string.app_name);

        Intent notificationIntent = new Intent(context, MyActivity.class);
        // set intent so it does not start a new activity
        notificationIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP
                | Intent.FLAG_ACTIVITY_SINGLE_TOP);
        PendingIntent intent = PendingIntent.getActivity(context, 0, notificationIntent, 0);
        notification.setLatestEventInfo(context, title, message, intent);
        notification.flags |= Notification.FLAG_AUTO_CANCEL;

        // Play default notification sound
        notification.defaults |= Notification.DEFAULT_SOUND;

        // Vibrate if vibrate is enabled
        notification.defaults |= Notification.DEFAULT_VIBRATE;

        // Always id 0: each new message replaces the previous notification.
        notificationManager.notify(0, notification);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.jini.jeri; import org.apache.river.jeri.internal.runtime.BasicExportTable; import org.apache.river.logging.Levels; import java.lang.ref.WeakReference; import java.rmi.Remote; import java.rmi.RemoteException; import java.rmi.server.ExportException; import java.rmi.server.Unreferenced; import java.security.AccessControlContext; import java.security.PrivilegedAction; import java.util.logging.Level; import java.util.logging.Logger; import net.jini.config.Configuration; import net.jini.export.Exporter; import net.jini.export.ServerContext; import net.jini.id.Uuid; import net.jini.id.UuidFactory; import net.jini.io.MarshalInputStream; import net.jini.io.context.ClientHost; import net.jini.io.context.ClientSubject; import net.jini.security.Security; import net.jini.security.SecurityContext; /** * An <code>Exporter</code> implementation for exporting * a remote object to use Jini extensible remote invocation * (Jini ERI). Typically, instances of this class should be * obtained from a {@link Configuration} rather than being explicitly * constructed. * Each instance of <code>BasicJeriExporter</code> * can export only a single remote object. 
* * <p>The following properties (defined during construction) govern * invocation behavior and other characteristics of the exported remote * object and its proxy: * <ul> * <li>{@link ServerEndpoint}: the <code>ServerEndpoint</code> over * which calls are accepted. * * <li><p>{@link InvocationLayerFactory}: a factory used to obtain the remote * object's proxy and invocation dispatcher. * * <li><p><i>enableDGC</i> flag: if <code>true</code>, distributed * garbage collection (DGC) is enabled for the exported remote object, * and the {@link BasicObjectEndpoint} produced by this exporter * participates in DGC and thus constitutes a strong remote reference * to the remote object; if <code>false</code>, DGC is not enabled for * the remote object, and the <code>BasicObjectEndpoint</code> does * not participate in DGC and thus is a weak remote reference, so it * does not prevent the remote object from being garbage collected. * * <li><p><i>keepAlive</i> flag: if <code>true</code>, the virtual * machine is kept alive (with a non-daemon thread) while the remote * object remains exported via this exporter. * * <li><p>{@link Uuid Uuid}: the object identifier to use for the * remote object; if <code>null</code>, a unique object identifier is * chosen for the remote object using {@link UuidFactory#generate * UuidFactory.generate}. * </ul> * * <p>If DGC is not enabled for a remote object, then the * implementation always only weakly references the remote object. If * DGC is enabled for a remote object, then the implementation weakly * references the remote object when its referenced set is empty and * strongly references the remote object when its referenced set is * not empty (see below). If the implementation weakly references the * remote object and the weak reference is cleared, the remote object * becomes effectively unexported. * </p> * <p>Enabling DGC is not advisable in some circumstances. 
DGC should * not be enabled for a remote object exported with a well known * object identifier. Enabling DGC with a secure remote object is * generally discouraged, because DGC communication is always made in * a client-side context in which there are no client constraints and * no client subject, so it can leave the remote object open to denial * of service attacks. Some transport providers may not support making * requests without a client subject, so even if DGC is enabled in the * case where such a transport provider is used, DGC will be effectively * disabled on the client side. * </p> * <p>Multiple remote objects can be exported on the same server endpoint, * and the same remote object can be exported multiple times on different * server endpoints with the only restriction being that a given pair of * object identifier and listen endpoint (derived from the server endpoint) * can only have one active export at any given time. * </p> * <p>Two instances of this class are equal only if they are references to * the same (<code>==</code>) object. * </p> * <p>The server endpoint is not transmitted in the remote reference; only the * derived client endpoint is transmitted. * </p> * <p>Remote objects exported with instances of this class can call {@link * ServerContext#getServerContextElement * ServerContext.getServerContextElement}, passing the class {@link * ClientSubject} to obtain the authenticated identity of the client (if * any) for an incoming remote call, or passing the class {@link * ClientHost} to obtain the address of the client host. * </p> * <p>For remote objects exported with instances of this class, there is no * automatic replacement of the proxy for the remote object during * marshalling; either the proxy must be passed explicitly, or the remote * object implementation class must be serializable and have a * <code>writeReplace</code> method that returns its proxy. 
* </p> * <h1>Distributed Garbage Collection</h1> * <p> * The <code>BasicJeriExporter</code> class acts as the server-side * DGC implementation for all remote objects exported with DGC enabled * using its instances. * </p> * <p>An entity known as the <i>DGC client</i> tracks the existence * and reachability of live remote references * (<code>BasicObjectEndpoint</code> instances that participate in * DGC) for a <code>BasicObjectEndpoint</code> class in some * (potentially) remote virtual machine. A DGC client is identified * by a universally unique identifier (a <code>Uuid</code>). A DGC * client sends <i>dirty calls</i> and <i>clean calls</i> to the * {@link Endpoint} of a live remote reference to inform the * server-side DGC implementation when the number of live remote * references to a given remote object it is tracking increases from * zero to greater than zero and decreases from greater than zero to * zero, respectively. A DGC client also sends dirty calls to the * <code>Endpoint</code> of live remote references it is tracking to * renew its lease. The client-side behavior of dirty and clean calls * is specified by {@link BasicObjectEndpoint}. * * <p>On the server side, for every remote object exported with DGC * enabled, the implementation maintains a <i>referenced set</i>, * which contains the <code>Uuid</code>s of the DGC clients that are * known to have live remote references to the remote object. The * contents of the referenced set are modified as a result of dirty * calls, clean calls, and expiration of leases (see below). While * the referenced set is not empty, the implementation strongly * references the remote object, so that it will not be garbage * collected. While the referenced set is empty, the implementation * only weakly references the remote object, so that it may be garbage * collected (if it is not otherwise strongly reachable locally). If * a remote object is garbage collected, it becomes effectively * unexported. 
If a remote object that is an instance of {@link * Unreferenced} is exported with DGC enabled, then whenever the size * of its referenced set transitions from greater than zero to zero, * its {@link Unreferenced#unreferenced unreferenced} method will be * invoked (before the strong reference is dropped). Note that a * referenced set spans multiple exports of the same (identical) * remote object with <code>BasicJeriExporter</code>. * * <p>For every <code>RequestDispatcher</code> passed by * <code>BasicJeriExporter</code> to a <code>ListenEndpoint</code> as * part of exporting, whenever it has at least one remote object * exported with DGC enabled, it also has an implicitly exported * remote object that represents the server-side DGC implementation. * This remote object is effectively exported with an object * identifier of <code>d32cd1bc-273c-11b2-8841-080020c9e4a1</code> and * an <code>InvocationDispatcher</code> that behaves like a {@link * BasicInvocationDispatcher} with no server constraints, with a * {@link BasicInvocationDispatcher#createMarshalInputStream * createMarshalInputStream} implementation that returns a {@link * MarshalInputStream} that ignores codebase annotations, and with * support for at least the following remote methods: * * <pre> * long dirty(Uuid clientID, long sequenceNum, Uuid[] ids) * throws {@link RemoteException}; * * void clean(Uuid clientID, long sequenceNum, Uuid[] ids, boolean strong) * throws RemoteException; * </pre> * * <code>clientID</code> identifies the DGC client that is making the * dirty or clean call, and <code>sequenceNum</code> identifies the * sequence number of the dirty or clean call with respect to other * dirty and clean calls from the same DGC client. The sequence * numbers identify the correct order of semantic interpretation of * dirty and clean calls from the same DGC client, regardless of the * order in which they arrive. 
The well-known object identifier for * the server-side DGC implementation is reserved; attempting to * export any other remote object with that object identifier always * throws an {@link ExportException}. * * <p>A dirty call is processed as follows: * * <ul> * * <li>It establishes or renews the DGC lease for the identified DGC * client. The duration of the granted lease, which is chosen by the * implementation, is conveyed as the value returned by the dirty * call, in milliseconds starting from the some time during the * processing of the dirty call. While the lease for a DGC client is * valid (not expired), the DGC client is preserved in referenced sets * of exported remote objects. * * <li><p>It adds the DGC client's <code>Uuid</code> to the referenced * sets of the exported remote objects identified by <code>ids</code>, * if any, so that they are prevented from being garbage collected. * For each <code>Uuid</code> in <code>ids</code>: * * <blockquote> * * The identified remote object is the remote object exported with * that <code>Uuid</code> on the <code>ListenEndpoint</code> (and thus * <code>RequestDispatcher</code>) that the dirty call was received * on. If no such exported remote object exists (for example, if it * has been garbage collected), then that <code>Uuid</code> in * <code>ids</code> is ignored. If the sequence number is less than * the last recorded sequence number of a dirty or clean call for the * identified remote object from the same DGC client, then the remote * object's referenced set is not modified. Otherwise, the DGC * client's <code>Uuid</code> is added to the remote object's * referenced set (if not already present). If this addition causes * the referenced set to transition from empty to non-empty, then the * implementation starts strongly referencing the remote object. 
*
* </blockquote>
*
* </ul>
*
* <p>A clean call is processed as follows:
*
* <ul>
*
* <li>It removes the DGC client's <code>Uuid</code> from the
* referenced sets of the exported remote objects identified by
* <code>ids</code>, so that they are not prevented from being garbage
* collected by the given DGC client. For each <code>Uuid</code> in
* <code>ids</code>:
*
* <blockquote>
*
* <p>The identified remote object is the remote object exported with
* that <code>Uuid</code> on the <code>ListenEndpoint</code> (and thus
* <code>RequestDispatcher</code>) that the dirty call was received
* on. If no such exported remote object exists (for example, if it
* has been garbage collected), then that <code>Uuid</code> in
* <code>ids</code> is ignored. If the sequence number is less than
* the last recorded sequence number of a dirty or clean call for the
* identified remote object from the same DGC client, then the remote
* object's referenced set is not modified. Otherwise, the DGC
* client's <code>Uuid</code> is removed from the remote object's
* referenced set (if it is present). If this removal causes the
* referenced set to transition from non-empty to empty, then the
* implementation starts only weakly referencing the remote object
* (and before doing so, if the remote object is an instance of
* <code>Unreferenced</code>, its <code>unreferenced</code> method is
* invoked). If <code>strong</code> is <code>true</code>, then a
* record is kept of the specified sequence number from the DGC client
* for some reasonable period of time, in the event of a dirty call
* that might arrive later with a lower sequence number.
*
* </blockquote>
*
* </ul>
*
* <p>If the implementation detects that the most recently granted DGC
* lease for a given DGC client has expired, then it assumes that the
* DGC client has abnormally terminated, and the DGC client's
* <code>Uuid</code> is removed from the referenced sets of all
* exported remote objects.
If this removal causes a referenced set * to transition from non-empty to empty, then the implementation * starts only weakly referencing the corresponding remote object (and * before doing so, if the remote object is an instance of * <code>Unreferenced</code>, its <code>unreferenced</code> method is * invoked). * * <p>Unexporting a remote object with a * <code>BasicJeriExporter</code> causes the removal of DGC client * <code>Uuid</code>s from the remote object's referenced set that * were only present because of dirty calls that were received as a * result of exporting it with that <code>BasicJeriExporter</code>. * If the remote object remains exported with DGC enabled by another * <code>BasicJeriExporter</code> and this removal causes the * referenced set to transition from non-empty to empty, then the * implementation starts only weakly referencing the remote object * (and before doing so, if the remote object is an instance of * <code>Unreferenced</code>, its <code>unreferenced</code> method is * invoked). * * <p>When the implementation invokes a remote object's * <code>unreferenced</code> method, it does so with the security * context and context class loader in effect when the remote object * was exported. If the remote object is currently exported more than * once, then the security context and context class loader in effect * for any one of those exports are used. * * @author Sun Microsystems, Inc. 
* @since 2.0 * * * * <p>This implementation uses the {@link Logger} named * <code>net.jini.jeri.BasicJeriExporter</code> to log * information at the following levels: * * <table summary="Describes what is logged by BasicJeriExporter at various * logging levels" border=1 cellpadding=5> * * <tr> <th> Level <th> Description * * <tr> <td> {@link Levels#FAILED FAILED} <td> incoming request for * unrecognized object identifier (no such object) * * <tr> <td> {@link Levels#FAILED FAILED} <td> I/O exception * dispatching incoming request * * <tr> <td> {@link Level#FINE FINE} <td> successful export of object * * <tr> <td> {@link Level#FINE FINE} <td> attempted unexport of object * * <tr> <td> {@link Level#FINE FINE} <td> garbage collection of * exported object * * <tr> <td> {@link Level#FINE FINEST} <td> detailed implementation * activity * * </table> **/ public final class BasicJeriExporter implements Exporter { private static final Logger logger = Logger.getLogger("net.jini.jeri.BasicJeriExporter"); private final ServerEndpoint se; private final InvocationLayerFactory ilf; private final boolean enableDGC; private final boolean keepAlive; private final Uuid id; private boolean used = false; private BasicExportTable.Entry entry; private WeakReference weakImplContainer = null; private static final BasicExportTable table = new BasicExportTable(); /** * Creates a new <code>BasicJeriExporter</code> with the given server * endpoint and invocation layer factory. The other properties of the * exporter default as follows: the <code>enableDGC</code> flag is * <code>false</code>, the <code>keepAlive</code> flag is * <code>true</code>, and the object identifier is chosen by invoking * {@link UuidFactory#generate UuidFactory.generate}. 
* * @param se the server endpoint over which calls may be accepted * @param ilf the factory for creating the remote object's * proxy and invocation dispatcher * @throws NullPointerException if <code>se</code> or <code>ilf</code> * is <code>null</code> **/ public BasicJeriExporter(ServerEndpoint se, InvocationLayerFactory ilf) { this(se, ilf, false, true); } /** * Creates a new <code>BasicJeriExporter</code> with the given server * endpoint, invocation layer factory, <code>enableDGC</code> flag, and * <code>keepAlive</code> flag. The object identifier is chosen by * invoking {@link UuidFactory#generate UuidFactory.generate}. * * @param se the server endpoint over which calls may be accepted * @param ilf the factory for creating the remote object's * proxy and invocation dispatcher * @param enableDGC if <code>true</code>, DGC is enabled to the object * on this server endpoint * @param keepAlive if <code>true</code>, the VM is kept alive * while the object (exported via this exporter) remains * exported * @throws NullPointerException if <code>se</code> or <code>ilf</code> * is <code>null</code> **/ public BasicJeriExporter(ServerEndpoint se, InvocationLayerFactory ilf, boolean enableDGC, boolean keepAlive) { this(se, ilf, enableDGC, keepAlive, null); } /** * Creates a new <code>BasicJeriExporter</code> with the given server * endpoint, invocation layer factory, <code>enableDGC</code> flag, * <code>keepAlive</code> flag, and object identifier. If * <code>id</code> is <code>null</code>, the object identifier is * chosen by invoking {@link UuidFactory#generate * UuidFactory.generate}. 
* * @param se the server endpoint over which calls may be accepted * @param ilf the factory for creating the remote object's proxy * and invocation dispatcher * @param enableDGC if <code>true</code>, DGC is enabled to the object * on this server endpoint * @param keepAlive if <code>true</code>, the VM is kept alive * while the object (exported via this exporter) remains * exported * @param id an object identifier or <code>null</code> * @throws NullPointerException if <code>se</code> or <code>ilf</code> * is <code>null</code> **/ public BasicJeriExporter(ServerEndpoint se, InvocationLayerFactory ilf, boolean enableDGC, boolean keepAlive, Uuid id) { if (se == null || ilf == null) { throw new NullPointerException(); } this.se = se; this.ilf = ilf; this.id = ((id == null) ? UuidFactory.generate() : id); this.enableDGC = enableDGC; this.keepAlive = keepAlive; } /** * Returns the server endpoint for this exporter. * * @return the server endpoint **/ public ServerEndpoint getServerEndpoint() { return se; } /** * Returns the <code>InvocationLayerFactory</code> for this * exporter. * * @return the factory **/ public InvocationLayerFactory getInvocationLayerFactory() { return ilf; } /** * Returns <code>true</code> if DGC is enabled on the server endpoint to * the object corresponding to this exporter; otherwise * returns <code>false</code>. * * @return <code>true</code> if DGC is enabled; * <code>false</code> otherwise **/ public boolean getEnableDGC() { return enableDGC; } /** * Returns <code>true</code> if the virtual machine is kept alive while * the object corresponding to this exporter is exported; otherwise * returns <code>false</code>. * * @return <code>true</code> if VM is kept alive while object is * exported; <code>false</code> otherwise **/ public boolean getKeepAlive() { return keepAlive; } /** * Returns the object identifier for this exporter. 
* * @return the object identifier **/ public Uuid getObjectIdentifier() { return id; } /** * Exports the specified remote object and returns a proxy for the * remote object. This method cannot be called more than once to * export a remote object or an <code>IllegalStateException</code> will * be thrown. * * <p>A {@link BasicObjectEndpoint} instance is created with the object * identifier of this exporter, the {@link Endpoint} obtained from * listening on the server endpoint (see below), and the * <code>enableDGC</code> flag of this exporter. * * <p>The client <code>Endpoint</code> for the * <code>BasicObjectEndpoint</code> is obtained by invoking {@link * ServerEndpoint#enumerateListenEndpoints enumerateListenEndpoints} on * the server endpoint with a {@link ServerEndpoint.ListenContext} * whose {@link ServerEndpoint.ListenContext#addListenEndpoint * addListenEndpoint} method is implemented as follows: <ul> * * <li>Invokes {@link ServerEndpoint.ListenEndpoint#checkPermissions * checkPermissions} on the supplied listen endpoint. * * <li>If the supplied listen endpoint has the same class and is equal * to another listen endpoint that has already been listened on, * returns the {@link ServerEndpoint.ListenCookie} corresponding to the * previous <code>listen</code> operation. Otherwise, it creates a * {@link RequestDispatcher} to handle inbound requests dispatched by * the listen endpoint, invokes {@link * ServerEndpoint.ListenEndpoint#listen listen} on the listen endpoint * (passing the request dispatcher) within an action passed to the * {@link Security#doPrivileged Security.doPrivileged} method, and * returns the <code>ServerEndpoint.ListenCookie</code> obtained by * invoking {@link ServerEndpoint.ListenHandle#getCookie getCookie} on * the {@link ServerEndpoint.ListenHandle} returned from the * <code>listen</code> invocation. 
* </ul>
*
* <p>A <code>RequestDispatcher</code> for a listen endpoint handles a
* dispatched inbound request (when its {@link
* RequestDispatcher#dispatch dispatch} method is invoked) as follows.
* The request dispatcher reads the object identifier of the target
* object being invoked by invoking {@link UuidFactory#read
* UuidFactory.read} on the request input stream of the inbound
* request. If no exported object corresponds to the object identifier
* read, it closes the request input stream, writes <code>0x00</code>
* to the response output stream, and closes the response output
* stream. Otherwise, it writes <code>0x01</code> to the response
* output stream, and invokes the {@link InvocationDispatcher#dispatch
* dispatch} method on the invocation dispatcher passing the target
* object, the inbound request, and the server context collection (see
* below).
*
* <p>A proxy and an invocation dispatcher are created by
* calling the {@link InvocationLayerFactory#createInstances
* createInstances} method of this exporter's invocation layer factory,
* passing the remote object, the <code>BasicObjectEndpoint</code>, and
* the server endpoint (as the {@link ServerCapabilities}). The proxy
* is returned by this method. The invocation dispatcher is called for
* each incoming remote call to this exporter's object identifier
* received from this exporter's server endpoint, passing the remote
* object and the {@link InboundRequest} received from the transport
* layer.
*
* <p>Each call to the invocation dispatcher's {@link
* InvocationDispatcher#dispatch dispatch} method is invoked with
* the following thread context:
* <ul>
* <li><code>dispatch</code> is invoked in a {@link
* PrivilegedAction} wrapped by a {@link SecurityContext}
* obtained when this method was invoked, with the {@link
* AccessControlContext} of that <code>SecurityContext</code>
* in effect.
* <li>The context class loader is the context class loader
* in effect when this method was invoked.
* <li>Each call to the dispatcher is made using {@link * ServerContext#doWithServerContext * ServerContext.doWithServerContext} with a server context * collection that is an unmodifiable view of the context * collection populated by invoking the {@link * InboundRequest#populateContext populateContext} method on the * inbound request passing a modifiable collection. The invocation * dispatcher's {@link InvocationDispatcher#dispatch dispatch} * method is invoked with the <code>impl</code>, the inbound * request, and that modifiable server context collection. * </ul> * * <p>There is no replacement of the proxy for the implementation * object during marshalling; either the proxy must be passed * explicitly in a remote call, or the implementation class must be * serializable and have a <code>writeReplace</code> method that * returns the proxy. * * @throws ExportException if an object is already exported * with the same object identifier and server endpoint, or * the invocation layer factory cannot create a proxy or * invocation dispatcher, or some other problem occurs while * exporting the object * @throws NullPointerException {@inheritDoc} * @throws IllegalStateException {@inheritDoc} * @throws SecurityException if invoking the * <code>checkPermissions</code> method on any of the listen * endpoints throws a <code>SecurityException</code> **/ @Override public synchronized Remote export(Remote impl) throws ExportException { /* * Check if object is already exported; disallow exporting more * than once via this exporter. */ if (used) { throw new IllegalStateException( "object already exported via this exporter"); } assert (entry == null); // ()s to work around javadoc bug /* * Export the remote object. */ entry = table.export(impl, se, enableDGC, keepAlive, id); used = true; /* * Create proxy and invocation dispatcher for the remote object. 
* * (Use package-private BasicObjectEndpoint constructor to suppress * DGC activity for this local live reference and to keep the impl * strongly referenced through it.) */ Remote proxy; InvocationLayerFactory.Instances inst = null; try { ImplContainer implContainer = new ImplContainer(impl); weakImplContainer = new WeakReference(implContainer); ObjectEndpoint oe = new BasicObjectEndpoint(entry.getEndpoint(), id, enableDGC, implContainer); inst = ilf.createInstances(impl, oe, se); entry.setInvocationDispatcher(inst.getInvocationDispatcher()); proxy = inst.getProxy(); if (logger.isLoggable(Level.FINE)) { logger.log(Level.FINE, "export of {0} via {1} returns proxy {2}", new Object[]{ impl, this, proxy }); } } finally { if (inst == null) { unexport(true); } } return proxy; } /** * Unexports the remote object exported via the exporter's * {@link #export export} method such that incoming remote calls * to the object identifier in this exporter are no longer accepted * through the server endpoint in this exporter. * * <p>If <code>force</code> is <code>true</code>, the object * is forcibly unexported even if there are pending or in-progress remote * calls to the object identifier through the server endpoint. If * <code>force</code> is <code>false</code>, the object is only * unexported if there are no pending or in-progress remote calls to the * object identifier through the server endpoint. * * <p>The return value is <code>true</code> if the object is (or was * previously) unexported, and <code>false</code> if the object is still * exported. 
* * @throws IllegalStateException {@inheritDoc} **/ public synchronized boolean unexport(boolean force) { if (!used) { throw new IllegalStateException( "no object exported via this exporter"); } if (entry != null && entry.unexport(force)) { entry = null; ImplContainer implContainer = (ImplContainer) weakImplContainer.get(); if (implContainer != null) { implContainer.clearImpl(); } } boolean result = entry == null; if (logger.isLoggable(Level.FINE)) { logger.log(Level.FINE, "unexport on {0} returns {1}", new Object[]{ this, Boolean.valueOf(result) }); } return result; } /** * Returns the string representation for this exporter. * * @return the string representation for this exporter **/ public String toString() { return "BasicJeriExporter[" + se + "," + id + "]"; } /** * Container for an impl object. * * BasicJeriExporter, when exporting an impl, passes an impl container * to the package-private BasicObjectEndpoint constructor so that the * BasicObjectEndpoint can reference the impl strongly (through the * container) while the object is exported. The BasicJeriExporter * instance holds onto the impl container weakly so it won't prevent * the impl from being garbage collected; only the local stub that * references the BasicObjectEndpoint will prevent the impl from being * garbage collected. * * If the object is explicitly unexported via BasicJeriExporter, the * BasicJeriExporter instance clears the impl field (if the container * hasn't been garbage collected) so a reachable stub that references * the container (via the BasicObjectEndpoint in the stub) will not * prevent the impl from being garbage collected. **/ private static class ImplContainer { private Object impl; ImplContainer(Object impl) { this.impl = impl; } void clearImpl() { impl = null; } } }
/*
 * Copyright (C) 2011 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.util.concurrent;

import static java.util.concurrent.TimeUnit.NANOSECONDS;

import com.google.common.annotations.Beta;
import com.google.common.base.Preconditions;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * Utilities for treating interruptible operations as uninterruptible.
 * In all cases, if a thread is interrupted during such a call, the call
 * continues to block until the result is available or the timeout elapses,
 * and only then re-interrupts the thread.
 *
 * <p>Every method follows the same pattern: retry the blocking call in a
 * loop, record any {@link InterruptedException} in a local flag, and in a
 * {@code finally} block restore the thread's interrupt status so callers
 * (and their callers) can still observe the interruption.
 *
 * @author Anthony Zana
 * @since 10.0
 */
@Beta
public final class Uninterruptibles {

  // Implementation Note: As of 3-7-11, the logic for each blocking/timeout
  // methods is identical, save for method being invoked.

  /**
   * Invokes {@code latch.}{@link CountDownLatch#await() await()}
   * uninterruptibly.
   */
  public static void awaitUninterruptibly(CountDownLatch latch) {
    boolean interrupted = false;
    try {
      while (true) {
        try {
          latch.await();
          return;
        } catch (InterruptedException e) {
          // Remember the interrupt and keep waiting; status is restored
          // in the finally block below.
          interrupted = true;
        }
      }
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  /**
   * Invokes
   * {@code latch.}{@link CountDownLatch#await(long, TimeUnit)
   * await(timeout, unit)} uninterruptibly.
   */
  public static boolean awaitUninterruptibly(CountDownLatch latch,
      long timeout, TimeUnit unit) {
    boolean interrupted = false;
    try {
      long remainingNanos = unit.toNanos(timeout);
      // Fixed deadline so time already waited before an interrupt is not
      // counted again on retry.
      long end = System.nanoTime() + remainingNanos;

      while (true) {
        try {
          // CountDownLatch treats negative timeouts just like zero.
          return latch.await(remainingNanos, NANOSECONDS);
        } catch (InterruptedException e) {
          interrupted = true;
          remainingNanos = end - System.nanoTime();
        }
      }
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  /**
   * Invokes {@code toJoin.}{@link Thread#join() join()} uninterruptibly.
   */
  public static void joinUninterruptibly(Thread toJoin) {
    boolean interrupted = false;
    try {
      while (true) {
        try {
          toJoin.join();
          return;
        } catch (InterruptedException e) {
          interrupted = true;
        }
      }
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  /**
   * Invokes {@code future.}{@link Future#get() get()} uninterruptibly.
   * To get uninterruptibility and remove checked exceptions, see
   * {@link Futures#getUnchecked}.
   *
   * <p>If instead, you wish to treat {@link InterruptedException} uniformly
   * with other exceptions, see {@link Futures#get(Future, Class) Futures.get}
   * or {@link Futures#makeChecked}.
   *
   * @throws ExecutionException if the computation threw an exception
   * @throws CancellationException if the computation was cancelled
   */
  public static <V> V getUninterruptibly(Future<V> future)
      throws ExecutionException {
    boolean interrupted = false;
    try {
      while (true) {
        try {
          return future.get();
        } catch (InterruptedException e) {
          interrupted = true;
        }
      }
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  /**
   * Invokes
   * {@code future.}{@link Future#get(long, TimeUnit) get(timeout, unit)}
   * uninterruptibly.
   *
   * <p>If instead, you wish to treat {@link InterruptedException} uniformly
   * with other exceptions, see {@link Futures#get(Future, Class) Futures.get}
   * or {@link Futures#makeChecked}.
   *
   * @throws ExecutionException if the computation threw an exception
   * @throws CancellationException if the computation was cancelled
   * @throws TimeoutException if the wait timed out
   */
  public static <V> V getUninterruptibly(
      Future<V> future, long timeout, TimeUnit unit)
          throws ExecutionException, TimeoutException {
    boolean interrupted = false;
    try {
      long remainingNanos = unit.toNanos(timeout);
      long end = System.nanoTime() + remainingNanos;

      while (true) {
        try {
          // Future treats negative timeouts just like zero.
          return future.get(remainingNanos, NANOSECONDS);
        } catch (InterruptedException e) {
          interrupted = true;
          remainingNanos = end - System.nanoTime();
        }
      }
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  /**
   * Invokes
   * {@code unit.}{@link TimeUnit#timedJoin(Thread, long)
   * timedJoin(toJoin, timeout)} uninterruptibly.
   */
  public static void joinUninterruptibly(Thread toJoin,
      long timeout, TimeUnit unit) {
    Preconditions.checkNotNull(toJoin);
    boolean interrupted = false;
    try {
      long remainingNanos = unit.toNanos(timeout);
      long end = System.nanoTime() + remainingNanos;

      while (true) {
        try {
          // TimeUnit.timedJoin() treats negative timeouts just like zero.
          NANOSECONDS.timedJoin(toJoin, remainingNanos);
          return;
        } catch (InterruptedException e) {
          interrupted = true;
          remainingNanos = end - System.nanoTime();
        }
      }
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  /**
   * Invokes {@code queue.}{@link BlockingQueue#take() take()} uninterruptibly.
   */
  public static <E> E takeUninterruptibly(BlockingQueue<E> queue) {
    boolean interrupted = false;
    try {
      while (true) {
        try {
          return queue.take();
        } catch (InterruptedException e) {
          interrupted = true;
        }
      }
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  /**
   * Invokes {@code queue.}{@link BlockingQueue#put(Object) put(element)}
   * uninterruptibly.
   *
   * @throws ClassCastException if the class of the specified element prevents
   *     it from being added to the given queue
   * @throws IllegalArgumentException if some property of the specified element
   *     prevents it from being added to the given queue
   */
  public static <E> void putUninterruptibly(BlockingQueue<E> queue, E element) {
    boolean interrupted = false;
    try {
      while (true) {
        try {
          queue.put(element);
          return;
        } catch (InterruptedException e) {
          interrupted = true;
        }
      }
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  // TODO(user): Support Sleeper somehow (wrapper or interface method)?
  /**
   * Invokes {@code unit.}{@link TimeUnit#sleep(long) sleep(sleepFor)}
   * uninterruptibly.
   */
  public static void sleepUninterruptibly(long sleepFor, TimeUnit unit) {
    boolean interrupted = false;
    try {
      long remainingNanos = unit.toNanos(sleepFor);
      long end = System.nanoTime() + remainingNanos;
      while (true) {
        try {
          // TimeUnit.sleep() treats negative timeouts just like zero.
          NANOSECONDS.sleep(remainingNanos);
          return;
        } catch (InterruptedException e) {
          interrupted = true;
          remainingNanos = end - System.nanoTime();
        }
      }
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  /**
   * Invokes {@code semaphore.}{@link Semaphore#tryAcquire(int, long, TimeUnit)
   * tryAcquire(1, timeout, unit)} uninterruptibly.
   *
   * @since 18.0
   */
  public static boolean tryAcquireUninterruptibly(
      Semaphore semaphore, long timeout, TimeUnit unit) {
    return tryAcquireUninterruptibly(semaphore, 1, timeout, unit);
  }

  /**
   * Invokes {@code semaphore.}{@link Semaphore#tryAcquire(int, long, TimeUnit)
   * tryAcquire(permits, timeout, unit)} uninterruptibly.
   *
   * @since 18.0
   */
  public static boolean tryAcquireUninterruptibly(
      Semaphore semaphore, int permits, long timeout, TimeUnit unit) {
    boolean interrupted = false;
    try {
      long remainingNanos = unit.toNanos(timeout);
      long end = System.nanoTime() + remainingNanos;

      while (true) {
        try {
          // Semaphore treats negative timeouts just like zero.
          return semaphore.tryAcquire(permits, remainingNanos, NANOSECONDS);
        } catch (InterruptedException e) {
          interrupted = true;
          remainingNanos = end - System.nanoTime();
        }
      }
    } finally {
      if (interrupted) {
        Thread.currentThread().interrupt();
      }
    }
  }

  // TODO(user): Add support for waitUninterruptibly.

  // Utility class; no instances.
  private Uninterruptibles() {}
}
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.crash; import android.os.Build; import android.util.Patterns; import org.chromium.base.ContextUtils; import org.chromium.base.Log; import org.chromium.base.VisibleForTesting; import org.chromium.components.minidump_uploader.CrashFileManager; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Extracts the recent logcat output from an Android device, elides PII sensitive info from it, * prepends the logcat data to the caller-provided minidump file, and initiates upload for the crash * report. * * Elided information includes: Emails, IP address, MAC address, URL/domains as well as Javascript * console messages. */ public class LogcatExtractionRunnable implements Runnable { private static final String TAG = "LogcatExtraction"; private static final long HALF_SECOND = 500; protected static final int LOGCAT_SIZE = 256; // Number of lines. 
protected static final String EMAIL_ELISION = "XXX@EMAIL.ELIDED"; @VisibleForTesting protected static final String URL_ELISION = "HTTP://WEBADDRESS.ELIDED"; private static final String GOOD_IRI_CHAR = "a-zA-Z0-9\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF"; private static final Pattern IP_ADDRESS = Pattern.compile( "((25[0-5]|2[0-4][0-9]|[0-1][0-9]{2}|[1-9][0-9]|[1-9])\\.(25[0-5]|2[0-4]" + "[0-9]|[0-1][0-9]{2}|[1-9][0-9]|[1-9]|0)\\.(25[0-5]|2[0-4][0-9]|[0-1]" + "[0-9]{2}|[1-9][0-9]|[1-9]|0)\\.(25[0-5]|2[0-4][0-9]|[0-1][0-9]{2}" + "|[1-9][0-9]|[0-9]))"); private static final String IRI = "[" + GOOD_IRI_CHAR + "]([" + GOOD_IRI_CHAR + "\\-]{0,61}[" + GOOD_IRI_CHAR + "]){0,1}"; private static final String GOOD_GTLD_CHAR = "a-zA-Z\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF"; private static final String GTLD = "[" + GOOD_GTLD_CHAR + "]{2,63}"; private static final String HOST_NAME = "(" + IRI + "\\.)+" + GTLD; private static final Pattern DOMAIN_NAME = Pattern.compile("(" + HOST_NAME + "|" + IP_ADDRESS + ")"); private static final Pattern WEB_URL = Pattern.compile("(?:\\b|^)((?:(http|https|Http|Https|rtsp|Rtsp):" + "\\/\\/(?:(?:[a-zA-Z0-9\\$\\-\\_\\.\\+\\!\\*\\'\\(\\)" + "\\,\\;\\?\\&\\=]|(?:\\%[a-fA-F0-9]{2})){1,64}(?:\\:(?:[a-zA-Z0-9\\$\\-\\_" + "\\.\\+\\!\\*\\'\\(\\)\\,\\;\\?\\&\\=]|(?:\\%[a-fA-F0-9]{2})){1,25})?\\@)?)?" + "(?:" + DOMAIN_NAME + ")" + "(?:\\:\\d{1,5})?)" + "(\\/(?:(?:[" + GOOD_IRI_CHAR + "\\;\\/\\?\\:\\@\\&\\=\\#\\~" + "\\-\\.\\+\\!\\*\\'\\(\\)\\,\\_])|(?:\\%[a-fA-F0-9]{2}))*)?" 
+ "(?:\\b|$)"); @VisibleForTesting protected static final String BEGIN_MICRODUMP = "-----BEGIN BREAKPAD MICRODUMP-----"; @VisibleForTesting protected static final String END_MICRODUMP = "-----END BREAKPAD MICRODUMP-----"; @VisibleForTesting protected static final String SNIPPED_MICRODUMP = "-----SNIPPED OUT BREAKPAD MICRODUMP FOR THIS CRASH-----"; @VisibleForTesting protected static final String IP_ELISION = "1.2.3.4"; @VisibleForTesting protected static final String MAC_ELISION = "01:23:45:67:89:AB"; @VisibleForTesting protected static final String CONSOLE_ELISION = "[ELIDED:CONSOLE(0)] ELIDED CONSOLE MESSAGE"; private static final Pattern MAC_ADDRESS = Pattern.compile("([0-9a-fA-F]{2}[-:]+){5}[0-9a-fA-F]{2}"); private static final Pattern CONSOLE_MSG = Pattern.compile("\\[\\w*:CONSOLE.*\\].*"); private static final String[] CHROME_NAMESPACE = new String[] {"org.chromium.", "com.google."}; private static final String[] SYSTEM_NAMESPACE = new String[] {"android.accessibilityservice", "android.accounts", "android.animation", "android.annotation", "android.app", "android.appwidget", "android.bluetooth", "android.content", "android.database", "android.databinding", "android.drm", "android.gesture", "android.graphics", "android.hardware", "android.inputmethodservice", "android.location", "android.media", "android.mtp", "android.net", "android.nfc", "android.opengl", "android.os", "android.preference", "android.print", "android.printservice", "android.provider", "android.renderscript", "android.sax", "android.security", "android.service", "android.speech", "android.support", "android.system", "android.telecom", "android.telephony", "android.test", "android.text", "android.transition", "android.util", "android.view", "android.webkit", "android.widget", "com.android.", "dalvik.", "java.", "javax.", "org.apache.", "org.json.", "org.w3c.dom.", "org.xml.", "org.xmlpull."}; private final File mMinidumpFile; /** * @param minidump The minidump file that needs logcat output to 
be attached. */ public LogcatExtractionRunnable(File minidump) { mMinidumpFile = minidump; } @Override public void run() { Log.i(TAG, "Trying to extract logcat for minidump %s.", mMinidumpFile.getName()); CrashFileManager fileManager = new CrashFileManager(ContextUtils.getApplicationContext().getCacheDir()); File fileToUpload = mMinidumpFile; try { List<String> logcat = getElidedLogcat(); fileToUpload = new MinidumpLogcatPrepender(fileManager, mMinidumpFile, logcat).run(); Log.i(TAG, "Succeeded extracting logcat to %s.", fileToUpload.getName()); } catch (IOException | InterruptedException e) { Log.w(TAG, e.toString()); } // Regardless of success, initiate the upload. That way, even if there are errors augmenting // the minidump with logcat data, the service can still upload the unaugmented minidump. if (MinidumpUploadService.shouldUseJobSchedulerForUploads()) { MinidumpUploadService.scheduleUploadJob(); } else { try { MinidumpUploadService.tryUploadCrashDump(fileToUpload); } catch (SecurityException e) { // For KitKat and below, there was a framework bug which causes us to not be able to // find our own crash uploading service. Ignore a SecurityException here on older // OS versions since the crash will eventually get uploaded on next start. // crbug/542533 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { throw e; } } } } private List<String> getElidedLogcat() throws IOException, InterruptedException { List<String> rawLogcat = getLogcat(); return Collections.unmodifiableList(elideLogcat(rawLogcat)); } @VisibleForTesting protected List<String> getLogcat() throws IOException, InterruptedException { // Grab the last lines of the logcat output, with a generous buffer to compensate for any // microdumps that might be in the logcat output, since microdumps are stripped in the // extraction code. Note that the repeated check of the process exit value is to account for // the fact that the process might not finish immediately. 
And, it's not appropriate to // call p.waitFor(), because this call will block *forever* if the process's output buffer // fills up. Process p = Runtime.getRuntime().exec("logcat -d"); BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream())); LinkedList<String> rawLogcat = new LinkedList<>(); Integer exitValue = null; try { while (exitValue == null) { String logLn; while ((logLn = reader.readLine()) != null) { rawLogcat.add(logLn); if (rawLogcat.size() > LOGCAT_SIZE * 4) { rawLogcat.removeFirst(); } } try { exitValue = p.exitValue(); } catch (IllegalThreadStateException itse) { Thread.sleep(HALF_SECOND); } } } finally { reader.close(); } if (exitValue != 0) { String msg = "Logcat failed: " + exitValue; Log.w(TAG, msg); throw new IOException(msg); } return trimLogcat(rawLogcat, LOGCAT_SIZE); } /** * Extracts microdump-free logcat for more informative crash reports. Returns the most recent * lines that are likely to be relevant to the crash, which are either the lines leading up to a * microdump if a microdump is present, or just the final lines of the logcat if no microdump is * present. * * @param rawLogcat The last lines of the raw logcat file, with sufficient history to allow a * sufficient history even after trimming. * @param maxLines The maximum number of lines logcat extracts from minidump. * * @return Logcat up to specified length as a list of strings. */ @VisibleForTesting protected static List<String> trimLogcat(List<String> rawLogcat, int maxLines) { // Trim off the last microdump, and anything after it. for (int i = rawLogcat.size() - 1; i >= 0; i--) { if (rawLogcat.get(i).contains(BEGIN_MICRODUMP)) { rawLogcat = rawLogcat.subList(0, i); rawLogcat.add(SNIPPED_MICRODUMP); break; } } // Trim down the remainder to only contain the most recent lines. Thus, if the original // input contained a microdump, the result contains the most recent lines before the // microdump, which are most likely to be relevant to the crash. 
If there is no microdump // in the raw logcat, then just hope that the last lines in the dump are relevant. if (rawLogcat.size() > maxLines) { rawLogcat = rawLogcat.subList(rawLogcat.size() - maxLines, rawLogcat.size()); } return rawLogcat; } @VisibleForTesting protected static List<String> elideLogcat(List<String> rawLogcat) { List<String> elided = new ArrayList<String>(rawLogcat.size()); for (String ln : rawLogcat) { ln = elideEmail(ln); ln = elideUrl(ln); ln = elideIp(ln); ln = elideMac(ln); ln = elideConsole(ln); elided.add(ln); } return elided; } /** * Elides any emails in the specified {@link String} with * {@link #EMAIL_ELISION}. * * @param original String potentially containing emails. * @return String with elided emails. */ @VisibleForTesting protected static String elideEmail(String original) { return Patterns.EMAIL_ADDRESS.matcher(original).replaceAll(EMAIL_ELISION); } /** * Elides any URLs in the specified {@link String} with * {@link #URL_ELISION}. * * @param original String potentially containing URLs. * @return String with elided URLs. 
*/ @VisibleForTesting protected static String elideUrl(String original) { StringBuilder buffer = new StringBuilder(original); Matcher matcher = WEB_URL.matcher(buffer); int start = 0; while (matcher.find(start)) { start = matcher.start(); int end = matcher.end(); String url = buffer.substring(start, end); if (!likelyToBeChromeNamespace(url) && !likelyToBeSystemNamespace(url)) { buffer.replace(start, end, URL_ELISION); end = start + URL_ELISION.length(); matcher = WEB_URL.matcher(buffer); } start = end; } return buffer.toString(); } public static boolean likelyToBeChromeNamespace(String url) { for (String ns : CHROME_NAMESPACE) { if (url.startsWith(ns)) { return true; } } return false; } public static boolean likelyToBeSystemNamespace(String url) { for (String ns : SYSTEM_NAMESPACE) { if (url.startsWith(ns)) { return true; } } return false; } /** * Elides any IP addresses in the specified {@link String} with * {@link #IP_ELISION}. * * @param original String potentially containing IPs. * @return String with elided IPs. */ @VisibleForTesting protected static String elideIp(String original) { return Patterns.IP_ADDRESS.matcher(original).replaceAll(IP_ELISION); } /** * Elides any MAC addresses in the specified {@link String} with * {@link #MAC_ELISION}. * * @param original String potentially containing MACs. * @return String with elided MACs. */ @VisibleForTesting protected static String elideMac(String original) { return MAC_ADDRESS.matcher(original).replaceAll(MAC_ELISION); } /** * Elides any console messages in the specified {@link String} with * {@link #CONSOLE_ELISION}. * * @param original String potentially containing console messages. * @return String with elided console messages. */ @VisibleForTesting protected static String elideConsole(String original) { return CONSOLE_MSG.matcher(original).replaceAll(CONSOLE_ELISION); } }
/*
 * Copyright (C) 2015 Haruki Hasegawa
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.h6ah4i.android.example.advrecyclerview.demo_s;

import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.TextView;

import com.h6ah4i.android.example.advrecyclerview.R;
import com.h6ah4i.android.example.advrecyclerview.common.data.AbstractDataProvider;
import com.h6ah4i.android.widget.advrecyclerview.swipeable.RecyclerViewSwipeManager;
import com.h6ah4i.android.widget.advrecyclerview.swipeable.SwipeableItemAdapter;
import com.h6ah4i.android.widget.advrecyclerview.utils.AbstractSwipeableItemViewHolder;
import com.h6ah4i.android.widget.advrecyclerview.utils.RecyclerViewAdapterUtils;

/**
 * Demo RecyclerView adapter for swipeable list items: swiping left pins an item,
 * swiping right removes it (or un-pins it if it was pinned). Item data comes from
 * an {@link AbstractDataProvider}; UI events are forwarded to an optional
 * {@link EventListener}.
 */
public class MySwipeableItemAdapter
        extends RecyclerView.Adapter<MySwipeableItemAdapter.MyViewHolder>
        implements SwipeableItemAdapter<MySwipeableItemAdapter.MyViewHolder> {
    private static final String TAG = "MySwipeableItemAdapter";

    // Backing data source for the list.
    private AbstractDataProvider mProvider;
    // Optional observer for remove/pin/click events; may be null.
    private EventListener mEventListener;
    // Click listener used while an item is NOT pinned (whole itemView is clickable).
    private View.OnClickListener mItemViewOnClickListener;
    // Click listener used while an item IS pinned (only the inner container is clickable).
    private View.OnClickListener mSwipeableViewContainerOnClickListener;

    /** Callbacks for swipe/click events raised by this adapter. */
    public interface EventListener {
        void onItemRemoved(int position);

        void onItemPinned(int position);

        void onItemViewClicked(View v, boolean pinned);
    }

    /** View holder exposing the swipeable container required by the swipe manager. */
    public static class MyViewHolder extends AbstractSwipeableItemViewHolder {
        public FrameLayout mContainer;
        public TextView mTextView;

        public MyViewHolder(View v) {
            super(v);
            mContainer = (FrameLayout) v.findViewById(R.id.container);
            mTextView = (TextView) v.findViewById(android.R.id.text1);
        }

        @Override
        public View getSwipeableContainerView() {
            return mContainer;
        }
    }

    public MySwipeableItemAdapter(AbstractDataProvider dataProvider) {
        mProvider = dataProvider;
        mItemViewOnClickListener = new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onItemViewClick(v);
            }
        };
        mSwipeableViewContainerOnClickListener = new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onSwipeableViewContainerClick(v);
            }
        };

        // SwipeableItemAdapter requires stable ID, and also
        // have to implement the getItemId() method appropriately.
        setHasStableIds(true);
    }

    // Click on the outer itemView only happens while the item is pinned.
    private void onItemViewClick(View v) {
        if (mEventListener != null) {
            mEventListener.onItemViewClicked(v, true); // true --- pinned
        }
    }

    // Click on the inner container only happens while the item is not pinned.
    private void onSwipeableViewContainerClick(View v) {
        if (mEventListener != null) {
            mEventListener.onItemViewClicked(
                    RecyclerViewAdapterUtils.getParentViewHolderItemView(v),
                    false); // false --- not pinned
        }
    }

    @Override
    public long getItemId(int position) {
        // Stable ID is required by the swipe manager (see constructor).
        return mProvider.getItem(position).getId();
    }

    @Override
    public int getItemViewType(int position) {
        return mProvider.getItem(position).getViewType();
    }

    @Override
    public MyViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        final LayoutInflater inflater = LayoutInflater.from(parent.getContext());
        // View type selects between the two demo item layouts.
        final View v = inflater.inflate(
                (viewType == 0) ? R.layout.list_item : R.layout.list_item2, parent, false);
        return new MyViewHolder(v);
    }

    @Override
    public void onBindViewHolder(MyViewHolder holder, int position) {
        final AbstractDataProvider.Data item = mProvider.getItem(position);

        // set listeners
        // (if the item is *not pinned*, click event comes to the itemView)
        holder.itemView.setOnClickListener(mItemViewOnClickListener);
        // (if the item is *pinned*, click event comes to the mContainer)
        holder.mContainer.setOnClickListener(mSwipeableViewContainerOnClickListener);

        // set text
        holder.mTextView.setText(item.getText());

        // set background resource (target view ID: container)
        // Only touch the background when the swipe state actually changed, to avoid
        // redundant setBackgroundResource() calls during rebinds.
        final int swipeState = holder.getSwipeStateFlags();

        if ((swipeState & RecyclerViewSwipeManager.STATE_FLAG_IS_UPDATED) != 0) {
            int bgResId;

            if ((swipeState & RecyclerViewSwipeManager.STATE_FLAG_IS_ACTIVE) != 0) {
                bgResId = R.drawable.bg_item_swiping_active_state;
            } else if ((swipeState & RecyclerViewSwipeManager.STATE_FLAG_SWIPING) != 0) {
                bgResId = R.drawable.bg_item_swiping_state;
            } else {
                bgResId = R.drawable.bg_item_normal_state;
            }

            holder.mContainer.setBackgroundResource(bgResId);
        }

        // set swiping properties
        // A pinned item stays slid fully off to the left; otherwise reset to rest.
        holder.setSwipeItemSlideAmount(
                item.isPinnedToSwipeLeft() ? RecyclerViewSwipeManager.OUTSIDE_OF_THE_WINDOW_LEFT : 0);
    }

    @Override
    public int getItemCount() {
        return mProvider.getCount();
    }

    @Override
    public int onGetSwipeReactionType(MyViewHolder holder, int position, int x, int y) {
        return mProvider.getItem(position).getSwipeReactionType();
    }

    @Override
    public void onSetSwipeBackground(MyViewHolder holder, int position, int type) {
        // Background shown *behind* the item while it is being swiped.
        int bgRes = 0;
        switch (type) {
            case RecyclerViewSwipeManager.DRAWABLE_SWIPE_NEUTRAL_BACKGROUND:
                bgRes = R.drawable.bg_swipe_item_neutral;
                break;
            case RecyclerViewSwipeManager.DRAWABLE_SWIPE_LEFT_BACKGROUND:
                bgRes = R.drawable.bg_swipe_item_left;
                break;
            case RecyclerViewSwipeManager.DRAWABLE_SWIPE_RIGHT_BACKGROUND:
                bgRes = R.drawable.bg_swipe_item_right;
                break;
        }
        holder.itemView.setBackgroundResource(bgRes);
    }

    @Override
    public int onSwipeItem(MyViewHolder holder, int position, int result) {
        Log.d(TAG, "onSwipeItem(position = " + position + ", result = " + result + ")");

        switch (result) {
            // swipe right
            case RecyclerViewSwipeManager.RESULT_SWIPED_RIGHT:
                if (mProvider.getItem(position).isPinnedToSwipeLeft()) {
                    // pinned --- back to default position
                    return RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_DEFAULT;
                } else {
                    // not pinned --- remove
                    return RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_REMOVE_ITEM;
                }
            // swipe left -- pin
            case RecyclerViewSwipeManager.RESULT_SWIPED_LEFT:
                return RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_MOVE_TO_SWIPED_DIRECTION;
            // other --- do nothing
            case RecyclerViewSwipeManager.RESULT_CANCELED:
            default:
                return RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_DEFAULT;
        }
    }

    @Override
    public void onPerformAfterSwipeReaction(MyViewHolder holder, int position, int result,
            int reaction) {
        Log.d(TAG, "onPerformAfterSwipeReaction(position = " + position + ", result = " + result
                + ", reaction = " + reaction + ")");

        final AbstractDataProvider.Data item = mProvider.getItem(position);

        if (reaction == RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_REMOVE_ITEM) {
            // Remove from the data source first, then notify the RecyclerView.
            mProvider.removeItem(position);
            notifyItemRemoved(position);

            if (mEventListener != null) {
                mEventListener.onItemRemoved(position);
            }
        } else if (reaction == RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_MOVE_TO_SWIPED_DIRECTION) {
            item.setPinnedToSwipeLeft(true);

            notifyItemChanged(position);

            if (mEventListener != null) {
                mEventListener.onItemPinned(position);
            }
        } else {
            // Swipe cancelled / returned to default --- make sure the pinned flag is cleared.
            item.setPinnedToSwipeLeft(false);
        }
    }

    public EventListener getEventListener() {
        return mEventListener;
    }

    public void setEventListener(EventListener eventListener) {
        mEventListener = eventListener;
    }
}
/*
 * Copyright 2014 Rackspace
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.rackspacecloud.blueflood.inputs.handlers;

import com.codahale.metrics.Meter;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.gson.Gson;
import com.netflix.astyanax.serializers.AbstractSerializer;
import com.rackspacecloud.blueflood.inputs.formats.AggregatedPayload;
import com.rackspacecloud.blueflood.io.Instrumentation;
import com.rackspacecloud.blueflood.io.serializers.Serializers;
import com.rackspacecloud.blueflood.outputs.formats.ErrorResponse;
import com.rackspacecloud.blueflood.outputs.handlers.HandlerTestsBase;
import com.rackspacecloud.blueflood.service.Configuration;
import com.rackspacecloud.blueflood.service.CoreConfig;
import com.rackspacecloud.blueflood.types.*;
import com.rackspacecloud.blueflood.utils.DefaultClockImpl;
import com.rackspacecloud.blueflood.utils.TimeValue;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;

import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.TimeUnit;

import static com.rackspacecloud.blueflood.TestUtils.*;
import static junit.framework.Assert.assertEquals;
import static org.mockito.Matchers.*;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;

/**
 * Unit tests for {@code HttpAggregatedMultiIngestionHandler}: parsing of the
 * multi-tenant aggregated-metrics JSON payload, conversion of counters /
 * gauges / sets / timers, HTTP error responses for malformed bodies, and the
 * per-tenant ingestion-metrics accounting (on-time vs. delayed).
 *
 * The Netty channel pipeline is fully mocked; responses are captured via an
 * {@link ArgumentCaptor} on {@code channel.write(...)}.
 */
public class HttpAggregatedMultiIngestionHandlerTest extends HandlerTestsBase {

    private HttpAggregatedMultiIngestionHandler handler;
    private HttpMetricsIngestionServer.Processor processor;

    // Mocked Netty plumbing; the handler only ever writes a FullHttpResponse to it.
    private ChannelHandlerContext context;
    private Channel channel;
    private ChannelFuture channelFuture;

    // Global meters keyed by tenant; tests snapshot counts before/after a request.
    private Meter ingestedMetrics;
    private Meter ingestedDelayedMetrics;

    private static final String TENANT = "tenant";

    // Parsed from sample_multi_aggregated_payload.json in buildBundle().
    private List<AggregatedPayload> bundleList;

    private final String postfix = ".pref";

    /**
     * Wires up the mocked processor/channel and parses the sample multi-payload
     * fixture into {@link #bundleList}. The processor mock returns an empty
     * future result so handle() completes without real persistence.
     */
    @Before
    public void buildBundle() throws Exception {
        processor = mock(HttpMetricsIngestionServer.Processor.class);
        handler = new HttpAggregatedMultiIngestionHandler(processor, new TimeValue(5, TimeUnit.SECONDS));

        channel = mock(Channel.class);
        context = mock(ChannelHandlerContext.class);
        channelFuture = mock(ChannelFuture.class);
        when(context.channel()).thenReturn(channel);
        when(channel.write(anyString())).thenReturn(channelFuture);

        ListenableFuture mockFuture = mock(ListenableFuture.class);
        when(processor.apply(any(MetricsCollection.class))).thenReturn(mockFuture);
        when(mockFuture.get(anyLong(), any(TimeUnit.class))).thenReturn(new ArrayList<Boolean>());

        String json = getJsonFromFile("sample_multi_aggregated_payload.json", postfix);
        bundleList = HttpAggregatedMultiIngestionHandler.createBundleList(json);

        ingestedMetrics = Instrumentation.getIngestedMetricsMeter(TENANT);
        ingestedDelayedMetrics = Instrumentation.getIngestedDelayedMetricsMeter(TENANT);
    }

    /** The sample fixture contains 3 bundles with 3 distinct tenants and timestamps. */
    @Test
    public void testMultiBundle() {
        HashSet<String> tenantIdSet = new HashSet<String>();
        HashSet<Long> timestampSet = new HashSet<Long>();
        Assert.assertTrue(bundleList.size() == 3);

        for (AggregatedPayload bundle : bundleList) {
            tenantIdSet.add(bundle.getTenantId());
            timestampSet.add(bundle.getTimestamp());
        }

        Assert.assertTrue(tenantIdSet.size() == 3); //3 unique timestamps are supported
        Assert.assertTrue(timestampSet.size() == 3); //3 unique tenants are supported
    }

    /** Each bundle's counters convert to exactly one serializable PreaggregatedMetric. */
    @Test
    public void testCounters() {
        for (AggregatedPayload bundle : bundleList) {
            Collection<PreaggregatedMetric> counters = PreaggregateConversions.convertCounters("1", 1, 15000, bundle.getCounters());
            Assert.assertEquals(1, counters.size());
            ensureSerializability(counters);
        }
    }

    /**
     * An empty JSON array is valid input to createBundleList and must not throw.
     * NOTE(review): no assertion on the result — presumably this only guards
     * against an exception; consider asserting bundle.isEmpty().
     */
    @Test
    public void testEmptyButValidMultiJSON() {
        String badJson = "[]";
        List<AggregatedPayload> bundle = HttpAggregatedMultiIngestionHandler.createBundleList(badJson);
    }

    /** Each bundle's gauges convert to exactly one serializable PreaggregatedMetric. */
    @Test
    public void testGauges() {
        for (AggregatedPayload bundle : bundleList) {
            Collection<PreaggregatedMetric> gauges = PreaggregateConversions.convertGauges("1", 1, bundle.getGauges());
            Assert.assertEquals(1, gauges.size());
            ensureSerializability(gauges);
        }
    }

    /** Each bundle's sets convert to exactly one serializable PreaggregatedMetric. */
    @Test
    public void testSets() {
        for (AggregatedPayload bundle : bundleList) {
            Collection<PreaggregatedMetric> sets = PreaggregateConversions.convertSets("1", 1, bundle.getSets());
            Assert.assertEquals(1, sets.size());
            ensureSerializability(sets);
        }
    }

    /** Each bundle's timers convert to exactly one serializable PreaggregatedMetric. */
    @Test
    public void testTimers() {
        for (AggregatedPayload bundle : bundleList) {
            Collection<PreaggregatedMetric> timers = PreaggregateConversions.convertTimers("1", 1, bundle.getTimers());
            Assert.assertEquals(1, timers.size());
            ensureSerializability(timers);
        }
    }

    // ok. while we're out it, let's test serialization. Just for fun. The reasoning is that these metrics
    // follow a different creation path that what we currently have in tests.
    /** Round-trips each metric value through its Astyanax serializer; throws on failure. */
    private static void ensureSerializability(Collection<PreaggregatedMetric> metrics) {
        for (PreaggregatedMetric metric : metrics) {
            AbstractSerializer serializer = Serializers.serializerFor(metric.getMetricValue().getClass());
            serializer.toByteBuffer(metric.getMetricValue());
        }
    }

    /** An empty request body yields a 400 with a single "Invalid request body" error. */
    @Test
    public void testEmptyRequest() throws IOException {
        String requestBody = "";
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 1, errorResponse.getErrors().size());
        assertEquals("Invalid error message", "Invalid request body", errorResponse.getErrors().get(0).getMessage());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
    }

    /** A JSON object (not an array) is rejected with a 400 "Invalid request body". */
    @Test
    public void testNonArrayJsonRequest() throws IOException {
        String requestBody = "{}"; //causes JsonMappingException
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String errorResponseBody = argument.getValue().content().toString(Charset.defaultCharset());
        ErrorResponse errorResponse = getErrorResponse(errorResponseBody);

        assertEquals("Number of errors invalid", 1, errorResponse.getErrors().size());
        assertEquals("Invalid error message", "Invalid request body", errorResponse.getErrors().get(0).getMessage());
        assertEquals("Invalid tenant", TENANT, errorResponse.getErrors().get(0).getTenantId());
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
    }

    /** An empty JSON array parses but contains no metrics: plain 400 "No valid metrics". */
    @Test
    public void testEmptyArrayJsonRequest() throws IOException {
        String requestBody = "[]"; //causes JsonMappingException
        FullHttpRequest request = createIngestRequest(requestBody);

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        String responseBody = argument.getValue().content().toString(Charset.defaultCharset());

        assertEquals("Invalid response", "No valid metrics", responseBody);
        assertEquals("Invalid status", HttpResponseStatus.BAD_REQUEST, argument.getValue().getStatus());
    }

    /** With per-tenant tracking on, an empty payload records nothing to either meter. */
    @Test
    public void perTenantMetricsOn_emptyRequest_shouldNotRecordAnything() throws IOException {
        String requestBody = "[]";
        FullHttpRequest request = createIngestRequest(requestBody);

        long ingestedMetricsBefore = ingestedMetrics.getCount();
        long ingestedDelayedMetricsBefore = ingestedDelayedMetrics.getCount();

        // Spy with perTenantMetrics=true so recordPerTenantMetrics calls can be verified.
        HttpAggregatedMultiIngestionHandler handler = spy(new HttpAggregatedMultiIngestionHandler(processor, new TimeValue(5, TimeUnit.SECONDS), true));

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        verify(handler, never()).recordPerTenantMetrics(eq(TENANT), anyInt(), anyInt());
        assertEquals("ingested metrics count", 0, ingestedMetrics.getCount() - ingestedMetricsBefore);
        assertEquals("ingested delayed metrics count", 0, ingestedDelayedMetrics.getCount() - ingestedDelayedMetricsBefore);
    }

    /** Payload timestamped beyond ROLLUP_DELAY_MILLIS counts as 12 delayed, 0 on-time. */
    @Test
    public void perTenantMetricsOn_shouldRecordDelayedMetrics() throws Exception {
        // 100ms past the configured delay threshold => every metric is "delayed".
        long delayedTime = new DefaultClockImpl().now().getMillis() - 100
                - Configuration.getInstance().getLongProperty(CoreConfig.ROLLUP_DELAY_MILLIS);
        FullHttpRequest request = createIngestRequest(
                getJsonFromFile("sample_multi_aggregated_payload.json", delayedTime, postfix));

        long ingestedMetricsBefore = ingestedMetrics.getCount();
        long ingestedDelayedMetricsBefore = ingestedDelayedMetrics.getCount();

        ListenableFuture<List<Boolean>> futures = mock(ListenableFuture.class);
        List<Boolean> answers = new ArrayList<>();
        answers.add(Boolean.TRUE);
        when(processor.apply(any())).thenReturn(futures);
        when(futures.get(anyLong(), any())).thenReturn(answers);

        HttpAggregatedMultiIngestionHandler handler = spy(new HttpAggregatedMultiIngestionHandler(processor, new TimeValue(5, TimeUnit.SECONDS), true));

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        verify(handler, times(1)).recordPerTenantMetrics(eq(TENANT), eq(0), eq(12));
        assertEquals("ingested metrics count", 0, ingestedMetrics.getCount() - ingestedMetricsBefore);
        assertEquals("ingested delayed metrics count", 12, ingestedDelayedMetrics.getCount() - ingestedDelayedMetricsBefore);
    }

    /** Payload timestamped "now" counts as 12 on-time, 0 delayed. */
    @Test
    public void perTenantMetricsOn_shouldRecordNonDelayedMetrics() throws Exception {
        long timestamp = new DefaultClockImpl().now().getMillis();
        FullHttpRequest request = createIngestRequest(
                getJsonFromFile("sample_multi_aggregated_payload.json", timestamp, postfix));

        long ingestedMetricsBefore = ingestedMetrics.getCount();
        long ingestedDelayedMetricsBefore = ingestedDelayedMetrics.getCount();

        ListenableFuture<List<Boolean>> futures = mock(ListenableFuture.class);
        List<Boolean> answers = new ArrayList<>();
        answers.add(Boolean.TRUE);
        when(processor.apply(any())).thenReturn(futures);
        when(futures.get(anyLong(), any())).thenReturn(answers);

        HttpAggregatedMultiIngestionHandler handler = spy(new HttpAggregatedMultiIngestionHandler(processor, new TimeValue(5, TimeUnit.SECONDS), true));

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        verify(handler, times(1)).recordPerTenantMetrics(eq(TENANT), eq(12), eq(0));
        assertEquals("ingested metrics count", 12, ingestedMetrics.getCount() - ingestedMetricsBefore);
        assertEquals("ingested delayed metrics count", 0, ingestedDelayedMetrics.getCount() - ingestedDelayedMetricsBefore);
    }

    /**
     * With per-tenant tracking off, recordPerTenantMetrics is still invoked but
     * must not move either meter.
     */
    @Test
    public void perTenantMetricsOff_shouldNotRecordMetrics() throws Exception {
        long timestamp = new DefaultClockImpl().now().getMillis();
        FullHttpRequest request = createIngestRequest(
                getJsonFromFile("sample_multi_aggregated_payload.json", timestamp, postfix));

        long ingestedMetricsBefore = ingestedMetrics.getCount();
        long ingestedDelayedMetricsBefore = ingestedDelayedMetrics.getCount();

        ListenableFuture<List<Boolean>> futures = mock(ListenableFuture.class);
        List<Boolean> answers = new ArrayList<>();
        answers.add(Boolean.TRUE);
        when(processor.apply(any())).thenReturn(futures);
        when(futures.get(anyLong(), any())).thenReturn(answers);

        // turn off per tenant metrics tracking
        HttpAggregatedMultiIngestionHandler handler = spy(new HttpAggregatedMultiIngestionHandler(processor, new TimeValue(5, TimeUnit.SECONDS), false));

        ArgumentCaptor<FullHttpResponse> argument = ArgumentCaptor.forClass(FullHttpResponse.class);
        handler.handle(context, request);
        verify(channel).write(argument.capture());

        verify(handler, times(1)).recordPerTenantMetrics(eq(TENANT), eq(12), eq(0));
        assertEquals("ingested metrics count", 0, ingestedMetrics.getCount() - ingestedMetricsBefore);
        assertEquals("ingested delayed metrics count", 0, ingestedDelayedMetrics.getCount() - ingestedDelayedMetricsBefore);
    }

    /** Builds a POST to the multi-aggregated ingest endpoint for the test tenant. */
    private FullHttpRequest createIngestRequest(String requestBody) {
        return super.createPostRequest("/v2.0/" + TENANT + "/aggregated/multi", requestBody);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package java.lang;

import ch.ntb.inf.deep.lowLevel.LL;
import ch.ntb.inf.deep.marker.Modified;

/**
 * The wrapper for the primitive type {@code double}.
 *
 * This is a modified version for the deep embedded runtime: bit-level access
 * goes through {@link LL} instead of native {@code doubleToLongBits}, and
 * string conversion is implemented with static scratch buffers (see the
 * thread-safety notes on {@link #doubleToChars} and {@link #toString(double)}).
 *
 * @see java.lang.Number
 * @since 1.0
 */
/* Changes:
 * 27.5.2014 Urs Graf initial import and modified
 */
public final class Double extends Number implements Comparable<Double>, Modified {
	private static final long serialVersionUID = -9172774392245257468L;

	/**
	 * A constant holding the positive infinity of type <code>double</code>.
	 * It is equal to the value returned by
	 * <code>Double.longBitsToDouble(0x7ff0000000000000L)</code>.
	 */
	public static final double POSITIVE_INFINITY = 1.0 / 0.0;

	/**
	 * A constant holding the negative infinity of type <code>double</code>.
	 * It is equal to the value returned by
	 * <code>Double.longBitsToDouble(0xfff0000000000000L)</code>.
	 */
	public static final double NEGATIVE_INFINITY = -1.0 / 0.0;

	/**
	 * A constant holding a Not-a-Number (NaN) value of type <code>double</code>.
	 * It is equivalent to the value returned by
	 * <code>Double.longBitsToDouble(0x7ff8000000000000L)</code>.
	 */
	public static final double NaN = 0.0d / 0.0;

	/**
	 * A constant holding the largest positive finite value of type
	 * <code>double</code>, (2-2<sup>-52</sup>)&middot;2<sup>1023</sup>.
	 * It is equal to the hexadecimal floating-point literal
	 * <code>0x1.fffffffffffffP+1023</code> and also equal to
	 * <code>Double.longBitsToDouble(0x7fefffffffffffffL)</code>.
	 */
	public static final double MAX_VALUE = 1.7976931348623157e+308; // 0x1.fffffffffffffP+1023

	/**
	 * A constant holding the smallest positive nonzero value of type
	 * <code>double</code>, 2<sup>-1074</sup>. It is equal to the
	 * hexadecimal floating-point literal <code>0x0.0000000000001P-1022</code>
	 * and also equal to <code>Double.longBitsToDouble(0x1L)</code>.
	 */
	public static final double MIN_VALUE = 4.9e-324; // 0x0.0000000000001P-1022

	/**
	 * A constant holding the smallest positive normalized value of type
	 * <code>double</code>, 2<sup>-1022</sup> (MIN_VALUE scaled by 2<sup>52</sup>).
	 * It is equal to the value of
	 * <code>Double.longBitsToDouble(1L << 52)</code>.
	 */
	public static final double MIN_VALUE_NORM = MIN_VALUE * (double) (1L << 52);

	/**
	 * The number of bits used to represent a <tt>double</tt> value.
	 */
	public static final int SIZE = 64;

	// IEEE 754 double exponent bias (1023).
	private static final int expOffset = 0x3ff;
	// Unbiased exponent value reserved for INF/NaN.
	// NOTE(review): declared as double although the value is integral.
	public static final double INF_EXPONENT = expOffset + 1;
	private static final byte dMaxNofFractionDigits = 15;
	// Scratch buffer for the 16 decimal digits produced by doubleToChars.
	private static final char[] digits = new char[dMaxNofFractionDigits + 1];
	// Output buffer and write cursor shared by doubleToChars/putChar.
	// NOTE(review): static mutable state — doubleToChars/toString are not thread-safe.
	private static char[] gchars;
	private static int nofChars;
	// High 32 bits identifying a quiet NaN / infinity respectively.
	private static final int highNaN = 0x7ff80000;
	private static final int highINF = 0x7ff00000;
	private static final double[] tene = { // exact powers of 10
		1E0, 1E1, 1E2, 1E3, 1E4, 1E5, 1E6, 1E7, 1E8, 1E9, 1E10, 1E11,
		1E12, 1E13, 1E14, 1E15, 1E16, 1E17, 1E18, 1E19, 1E20, 1E21, 1E22 };
	private static final double[] ten = { // rounded powers of 10
		1E-307, 1E-284, 1E-261, 1E-238, 1E-215, 1E-192, 1E-169, 1E-146,
		1E-123, 1E-100, 1E-77, 1E-54, 1E-31, 1E-8, 1E15, 1E38, 1E61,
		1E84, 1E107, 1E130, 1E153, 1E176, 1E199, 1E222, 1E245, 1E268, 1E291 };
	// Bit sets (32 entries per int) used by powOf10 to decide whether
	// ten[e/23]*tene[e%23] is already exact (eq) or one ulp too large (gr).
	private static final int[] eq = {
		0x96810239, // eq[ 0] = {0, 3..5, 9, 16, 23, 25, 26, 28, 31}
		0xFBBEFF64, // eq[ 1] = {2, 5, 6, 8..15, 17..21, 23..25, 27..31}
		0x1FFFFFFF, // eq[ 2] = {0..28}
		0xF85FCBEF, // eq[ 3] = {0..3, 5..9, 11, 14..20, 22, 27..31}
		0xFFFCFCC1, // eq[ 4] = {0, 6, 7, 10..15, 18..31}
		0xFFFBFFE3, // eq[ 5] = {0, 1, 5..17, 19..31}
		0xF7B5C5B3, // eq[ 6] = {0, 1, 4, 5, 7, 8, 10, 14..16, 18, 20, 21, 23..26, 28..31}
		0xF58F7FFB, // eq[ 7] = {0, 1, 3..14, 16..19, 23, 24, 26, 28..31}
		0x273F4F7F, // eq[ 8] = {0..6, 8..11, 14, 16..21, 24..26, 29}
		0xFFFFFE56, // eq[ 9] = {1, 2, 4, 6, 9..31}
		0x7FFFFFFF, // eq[ 10] = {0..30}
		0x78F9F5FF, // eq[ 11] = {0..8, 10, 12..16, 19..23, 27..30}
		0xECBFD7BF, // eq[ 12] = {0..5, 7..10, 12, 14..21, 23, 26, 27, 29..31}
		0xF9B7EEFF, // eq[ 13] = {0..7, 9..11, 13..18, 20, 21, 23, 24, 27..31}
		0xFFFFFFCF, // eq[ 14] = {0..3, 6..31}
		0x17FFBBFF, // eq[ 15] = {0..9, 11..13, 15..26, 28}
		0xFF4F2816, // eq[ 16] = {1, 2, 4, 11, 13, 16..19, 22, 24..31}
		0xBEBCCBFE, // eq[ 17] = {1..9, 11, 14, 15, 18..21, 23, 25..29, 31}
		0x3DDB7B75, // eq[ 18] = {0, 2, 4..6, 8, 9, 11..14, 16, 17, 19, 20, 22..24, 26..29}
		0x000000FC, // eq[ 19] = {2..7}
	};
	private static final int[] gr = {
		0x69000000, // gr[ 0] = {24, 27, 29, 30}
		0x0000009B, // gr[ 1] = {0, 1, 3, 4, 7}
		0xE0000000, // gr[ 2] = {29..31}
		0x07A03410, // gr[ 3] = {4, 10, 12, 13, 21, 23..26}
		0x0003033E, // gr[ 4] = {1..5, 8, 9, 16, 17}
		0x0004001C, // gr[ 5] = {2..4, 18}
		0x084A3A4C, // gr[ 6] = {2, 3, 6, 9, 11..13, 17, 19, 22, 27}
		0x00000004, // gr[ 7] = {2}
		0xD8C0B080, // gr[ 8] = {7, 12, 13, 15, 22, 23, 27, 28, 30, 31}
		0x000001A9, // gr[ 9] = {0, 3, 5, 7, 8}
		0x00000000, // gr[ 10] = {}
		0x00000000, // gr[ 11] = {}
		0x13402800, // gr[ 12] = {11, 13, 22, 24, 25, 28}
		0x06400000, // gr[ 13] = {22, 25, 26}
		0x00000030, // gr[ 14] = {4, 5}
		0xE8004400, // gr[ 15] = {10, 14, 27, 29..31}
		0x00B0D7E9, // gr[ 16] = {0, 3, 5..10, 12, 14, 15, 20, 21, 23}
		0x41433401, // gr[ 17] = {0, 10, 12, 13, 16, 17, 22, 24, 30}
		0x00000000, // gr[ 18] = {}
		0x00000000 // gr[ 19] = {}
	};

	/**
	 * Calculates power to the base of 10.
	 *
	 * Combines a rounded coarse table (steps of 10^23) with an exact fine
	 * table, then uses the eq/gr bit sets to correct the product by one ulp
	 * where the table product is known to be off.
	 *
	 * @param e
	 *            this is the exponent; values below -307 return 0, values
	 *            above 308 return NaN
	 * @return power of 10<sup>e</sup>.
	 */
	public static double powOf10(int e) {
		double r;
		if (e < -307) return 0;
		else if (e > 308) return Double.NaN;
		e += 307; // shift into table index range [0, 615]
		r = ten[e / 23] * tene[e % 23];
		// Note: (1 << e) relies on Java's shift-count masking (e mod 32),
		// matching the 32-bit stride of the eq/gr tables.
		if (((1 << e) & eq[e >>> 5]) != 0) return r; // product already exact
		// Adjust mantissa by one ulp: scale to a fixed exponent, +-1, scale back.
		int E = Double.getExponent(r);
		r = Double.setExponent(r, 52);
		if (((1 << e) & gr[e >>> 5]) != 0) r = r - 1;
		else r = r + 1;
		r = Double.setExponent(r, E);
		return r;
	}

	/**
	 * Returns an integer corresponding to the upper 32 bits of the given
	 * <a href="http://en.wikipedia.org/wiki/IEEE_754-1985">IEEE 754</a> double precision
	 * {@code value}.
	 */
	public static int highPartToIntBits(double arg) {
		long doubleBits = LL.doubleToBits(arg);
		return (int)(doubleBits >> 32);
	}

	/**
	 * Returns an integer corresponding to the lower 32 bits of the given
	 * <a href="http://en.wikipedia.org/wiki/IEEE_754-1985">IEEE 754</a> double precision
	 * {@code value}.
	 */
	public static int lowPartToIntBits(double arg) {
		long doubleBits = LL.doubleToBits(arg);
		return (int)(doubleBits);
	}

	/**
	 * Returns the unbiased exponent of a double precision {@code value}
	 * to the base of 2.
	 */
	public static int getExponent(double arg) {
		int highBits = highPartToIntBits(arg);
		return ((highBits >> 20) & 0x7ff) - expOffset;
	}

	/**
	 * Sets the exponent of a double precision {@code value}
	 * to the base of 2 and returns the new value.
	 * Sign and mantissa bits are preserved; {@code newExp} is re-biased.
	 */
	public static double setExponent(double d, int newExp) {
		long bits = LL.doubleToBits(d);
		newExp += expOffset;
		bits &= 0x800fffffffffffffL; // clear the 11 exponent bits
		bits |= (long)(newExp) << 52;
		return LL.bitsToDouble(bits);
	}

	// Appends one character to the shared output buffer and advances the cursor.
	private static void putChar(char ch) {
		gchars[nofChars] = ch;
		nofChars++;
	}

	/**
	 * Converts {@code val} to scientific notation (d.ddd...E+xxx) into
	 * {@code chars} and returns the number of characters written.
	 * Handles NaN ("NaN") and infinities ("+INF"/"-INF"); denormals are
	 * flushed to zero. {@code nofFractDigits} is clamped to [1, 15].
	 * NOTE(review): not thread-safe — uses the static gchars/digits buffers.
	 */
	public static int doubleToChars(double val, int nofFractDigits, char[] chars) {
		gchars = chars;
		nofChars = 0;
		if (chars == null) return 0;
		int high = highPartToIntBits(val);
		if ((high & highINF) == highINF) { // exponent all ones => NaN or INF
			if ((high & highNaN) == highNaN) { // NaN
				putChar('N'); putChar('a'); putChar('N');
			} else { // INF
				if (high >= 0) putChar('+'); else putChar('-');
				putChar('I'); putChar('N'); putChar('F');
			}
			// putChar('\0');
			gchars = null;
			return nofChars;
		}
		int exp = (high & highINF) >> 20;
		if (exp != 0 && high < 0) { putChar('-'); val = -val; }
		int low;
		if (exp == 0) { // no denormals
			high = 0; low = 0;
		} else {
			if (nofFractDigits < 1) nofFractDigits = 1;
			else if (nofFractDigits > 15) nofFractDigits = 15;
			// Decimal exponent estimate: binary exponent * log10(2) (301029/1e6).
			exp = (exp - expOffset) * 301029;
			if (exp % 1000000 < 0) exp = exp / 1000000 - 1;
			else exp = exp / 1000000;
			// Normalize val into [1, 10), adjusting exp accordingly.
			double z = powOf10(exp + 1);
			if (val >= z) { val = val / z; exp++; }
			else { val = val * powOf10(-exp); }
			// Round to the requested digit count; renormalize if rounding overflowed.
			if (val >= 10) { val = val * 0.1 + 0.5 / powOf10(nofFractDigits); exp++; }
			else {
				val = val + 0.5 / powOf10(nofFractDigits);
				if (val >= 10) { val = val * 0.1; exp++; }
			}
			// Split 16 significant digits into two 8-digit int halves.
			val = val * 1E7;
			high = (int) val;
			low = (int) ((val - high) * 1E8);
		}
		// Extract decimal digits least-significant first: low -> digits[8..15],
		// high -> digits[0..7].
		int dig = 15;
		while (dig > 7) { digits[dig] = (char) (low % 10 + '0'); low = low / 10; dig--; }
		while (dig >= 0) { digits[dig] = (char) (high % 10 + '0'); high = high / 10; dig--; }
		// Emit "d.fff...E+xxx".
		putChar(digits[0]);
		putChar('.');
		dig = 1;
		while (dig <= nofFractDigits) { putChar(digits[dig]); dig++; }
		putChar('E');
		if (exp >= 0) putChar('+'); else {putChar('-'); exp = -exp;}
		putChar((char) (exp / 100 % 10 + '0'));
		putChar((char) (exp / 10 % 10 + '0'));
		putChar((char) (exp % 10 + '0'));
		//putChar('\0');
		gchars = null;
		return nofChars;
	}

	/**
	 * Returns a {@code Double} instance for the specified double value.
	 *
	 * @param d
	 *            the double value to store in the instance.
	 * @return a {@code Double} instance containing {@code d}.
	 * @since 1.5
	 */
	public static Double valueOf(double d) {
		return new Double(d);
	}

	/**
	 * The value of the {@code Double}.
	 *
	 * @serial
	 */
	private final double value;

	/**
	 * Constructs a newly allocated {@code Double} object that
	 * represents the primitive {@code double} argument.
	 *
	 * @param value the value to be represented by the {@code Double}.
	 */
	public Double(double value) {
		this.value = value;
	}

	/**
	 * Constructs a new {@code Double} from the specified string.
	 *
	 * @param string
	 *            the string representation of a double value.
	 * @throws NumberFormatException
	 *             if {@code string} cannot be parsed as a double value.
	 * @see #parseDouble(String)
	 */
	public Double(String string) throws NumberFormatException {
		this(parseDouble(string));
	}

	/**
	 * Parses the specified string as a double value.
	 *
	 * @param string
	 *            the string representation of a double value.
	 * @return a {@code Double} instance containing the double value represented
	 *         by {@code string}.
	 * @throws NumberFormatException
	 *             if {@code string} cannot be parsed as a double value.
	 * @see #parseDouble(String)
	 */
	public static Double valueOf(String string) throws NumberFormatException {
		return parseDouble(string);
	}

	/**
	 * Returns the closest double value to the real number in the string.
	 *
	 * Accepts an optional leading sign, an optional decimal point, an optional
	 * 'e'/'E' exponent (with optional sign), and an optional trailing 'f'/'F'.
	 * NOTE(review): digits are not validated — non-digit characters are folded
	 * into the result as {@code ch - '0'} without throwing.
	 *
	 * @param s
	 *            the String that will be parsed to a floating point
	 * @return the double closest to the real number
	 *
	 * @exception NumberFormatException
	 *                if the String doesn't represent a double
	 */
	public static double parseDouble(String s) throws NumberFormatException {
		if (s == null) {
			throw new NumberFormatException("Invalid double");
		}
		int length = s.length();
		if (length == 0) {
			throw new NumberFormatException("Invalid double");
		}
		// Drop a trailing float suffix.
		char c = s.charAt(length-1);
		if (c == 'f' || c == 'F') length--;
		int start = 0;
		boolean neg = false;
		c = s.charAt(0);
		if (c == '-') {start = 1; neg = true;}
		if (c == '+') start = 1;
		// Locate decimal point and exponent marker (default: end of number).
		int dot = length;
		for (int i = start; i < length; i++) if (s.charAt(i) == '.') dot = i;
		int esign = length;
		for (int i = start; i < length; i++) {
			char ch = s.charAt(i);
			if (ch == 'e' || ch == 'E') esign = i;
		}
		// Accumulate all mantissa digits (skipping the dot) as an integer.
		long num = 0;
		for (int i = start; i < esign; i++) {
			if (i != dot) num = num * 10 + s.charAt(i) - '0';
		}
		double res = num;
		// Scale down by the number of fraction digits.
		int cnt = 0;
		for (int i = dot + 1; i < esign; i++) cnt++;
		res = res / powOf10(cnt);
		// Parse and apply the explicit exponent, if any.
		int exp = 0;
		boolean eneg = false;
		start = esign + 1;
		if (start < length) {
			c = s.charAt(start);
			if (c == '-') {start++; eneg = true;}
			if (c == '+') start ++;
		}
		for (int i = start; i < length; i++) {
			exp = exp * 10 + s.charAt(i) - '0';
		}
		if (eneg) exp = -exp;
		res = res * powOf10(exp);
		if (neg) res = -res;
		return res;
	}

	/**
	 * Returns the {@code double} value of this
	 * {@code Double} object.
	 *
	 * @return the {@code double} value represented by this object
	 */
	public double doubleValue() {
		return value;
	}

	/** Returns the value narrowed to {@code byte}. */
	@Override
	public byte byteValue() {
		return (byte) value;
	}

	/** Returns the value narrowed to {@code float}. */
	@Override
	public float floatValue() {
		return (float) value;
	}

	/** Returns the value truncated to {@code int}. */
	@Override
	public int intValue() {
		return (int) value;
	}

	/** Returns the value truncated to {@code long}. */
	@Override
	public long longValue() {
		return (long) value;
	}

	/** Returns the value narrowed to {@code short}. */
	@Override
	public short shortValue() {
		return (short) value;
	}

	/** XOR of the high and low 32 bit halves of the IEEE 754 representation. */
	@Override
	public int hashCode() {
		int low = lowPartToIntBits(value);
		int high = highPartToIntBits(value);
		return low ^ high;
	}

	@Override
	public String toString() {
		return Double.toString(value);
	}

	// Shared scratch buffer for toString(double).
	// NOTE(review): not thread-safe, and toString returns the whole 64-char
	// buffer rather than just the doubleToChars length — the tail may contain
	// NUL/stale characters. Confirm whether callers rely on trimming.
	private static char[] str1 = new char[64];

	/**
	 * Returns a string containing a concise, human-readable description of the
	 * specified double value.
	 *
	 * @param d
	 *            the double to convert to a string.
	 * @return a printable representation of {@code d}.
	 */
	public static String toString(double d) {
		doubleToChars(d, 15, str1);
		return new String(str1);
	}

	/**
	 * Compares this object to the specified double object to determine their
	 * relative order. There are two special cases:
	 * <ul>
	 * <li>{@code Double.NaN} is equal to {@code Double.NaN} and it is greater
	 * than any other double value, including {@code Double.POSITIVE_INFINITY};</li>
	 * <li>+0.0d is greater than -0.0d</li>
	 * </ul>
	 *
	 * @param object
	 *            the double object to compare this object to.
	 * @return a negative value if the value of this double is less than the
	 *         value of {@code object}; 0 if the value of this double and the
	 *         value of {@code object} are equal; a positive value if the value
	 *         of this double is greater than the value of {@code object}.
	 * @throws NullPointerException
	 *             if {@code object} is {@code null}.
	 * @see java.lang.Comparable
	 * @since 1.2
	 */
	public int compareTo(Double object) {
		return compare(value, object.value);
	}

	/**
	 * Compares the two specified double values. There are two special cases:
	 * <ul>
	 * <li>{@code Double.NaN} is equal to {@code Double.NaN} and it is greater
	 * than any other double value, including {@code Double.POSITIVE_INFINITY};</li>
	 * <li>+0.0d is greater than -0.0d</li>
	 * </ul>
	 *
	 * @param double1
	 *            the first value to compare.
	 * @param double2
	 *            the second value to compare.
	 * @return a negative value if {@code double1} is less than {@code double2};
	 *         0 if {@code double1} and {@code double2} are equal; a positive
	 *         value if {@code double1} is greater than {@code double2}.
	 */
	public static int compare(double double1, double double2) {
		// Non-zero, non-NaN checking.
		if (double1 > double2) {
			return 1;
		}
		if (double2 > double1) {
			return -1;
		}
		if (double1 == double2 && 0.0d != double1) {
			return 0;
		}

		// NaNs are equal to other NaNs and larger than any other double
		if (isNaN(double1)) {
			if (isNaN(double2)) {
				return 0;
			}
			return 1;
		} else if (isNaN(double2)) {
			return -1;
		}

		// Deal with +0.0 and -0.0: only the sign bit differs, and it lives in
		// the high word.
		long d1 = highPartToIntBits(double1);
		long d2 = highPartToIntBits(double2);
		// The below expression is equivalent to:
		// (d1 == d2) ? 0 : (d1 < d2) ? -1 : 1
		return (int) ((d1 >> 31) - (d2 >> 31));
	}

	/**
	 * Tests this double for equality with {@code object}.
	 * To be equal, {@code object} must be an instance of {@code Double} and
	 * {@code doubleToLongBits} must give the same value for both objects.
	 *
	 * <p>Note that, unlike {@code ==}, {@code -0.0} and {@code +0.0} compare
	 * unequal, and {@code NaN}s compare equal by this method.
	 *
	 * @param object
	 *            the object to compare this double with.
	 * @return {@code true} if the specified object is equal to this
	 *         {@code Double}; {@code false} otherwise.
	 */
	@Override
	public boolean equals(Object object) {
		return (object instanceof Double)
				&& (highPartToIntBits(this.value) == highPartToIntBits(((Double) object).value))
				&& (lowPartToIntBits(this.value) == lowPartToIntBits(((Double) object).value));
	}

	/**
	 * Indicates whether this object represents an infinite value.
	 *
	 * @return {@code true} if the value of this double is positive or negative
	 *         infinity; {@code false} otherwise.
	 */
	public boolean isInfinite() {
		return isInfinite(value);
	}

	/**
	 * Indicates whether the specified double represents an infinite value.
	 *
	 * @param d
	 *            the double to check.
	 * @return {@code true} if the value of {@code d} is positive or negative
	 *         infinity; {@code false} otherwise.
	 */
	public static boolean isInfinite(double d) {
		return (d == POSITIVE_INFINITY) || (d == NEGATIVE_INFINITY);
	}

	/**
	 * Indicates whether this object is a <em>Not-a-Number (NaN)</em> value.
	 *
	 * @return {@code true} if this double is <em>Not-a-Number</em>;
	 *         {@code false} if it is a (potentially infinite) double number.
	 */
	public boolean isNaN() {
		return isNaN(value);
	}

	/**
	 * Indicates whether the specified double is a <em>Not-a-Number (NaN)</em>
	 * value.
	 *
	 * @param d
	 *            the double value to check.
	 * @return {@code true} if {@code d} is <em>Not-a-Number</em>;
	 *         {@code false} if it is a (potentially infinite) double number.
	 */
	public static boolean isNaN(double d) {
		// NaN is the only value that is not equal to itself.
		return d != d;
	}
}
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2013 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.diff;

import difflib.DiffRow;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.extension.ExtensionAdaptor;
import org.parosproxy.paros.extension.ExtensionHook;
import org.parosproxy.paros.network.HttpMessage;
import org.zaproxy.zap.extension.diff.ZapDiffRowGenerator.Builder;
import org.zaproxy.zap.extension.diff.diff_match_patch.Diff;

/**
 * ZAP extension that shows a side-by-side diff of two HTTP messages
 * (requests or responses) in a modal-less dialog. Line-level differences come
 * from diffutils ({@link DiffRow}); character-level highlighting within
 * changed line pairs comes from diff_match_patch.
 */
public class ExtensionDiff extends ExtensionAdaptor {

    // Popup menu entries for diffing two selected requests / responses.
    private PopupMenuDiff popupMenuDiffRequests = null;
    private PopupMenuDiff popupMenuDiffResponses = null;
    // Lazily-created dialog, reused across invocations; disposed on unload.
    private DiffDialog diffDialog = null;

    /** */
    public ExtensionDiff() {
        super("ExtensionDiff");
        this.setOrder(75);
    }

    @Override
    public boolean canUnload() {
        return true;
    }

    /** Disposes the diff dialog (UI mode only) before delegating to super. */
    @Override
    public void unload() {
        if (getView() != null) {
            if (diffDialog != null) {
                diffDialog.dispose();
                diffDialog = null;
            }
        }

        super.unload();
    }

    /** Registers the two popup menu items when running with a UI. */
    public void hook(ExtensionHook extensionHook) {
        super.hook(extensionHook);

        if (getView() != null) {
            extensionHook.getHookMenu().addPopupMenuItem(getPopupMenuDiffRequests());
            extensionHook.getHookMenu().addPopupMenuItem(getPopupMenuDiffResponses());
        }
    }

    // Lazy accessor for the "diff requests" popup item.
    private PopupMenuDiff getPopupMenuDiffRequests() {
        if (popupMenuDiffRequests == null) {
            popupMenuDiffRequests =
                    new PopupMenuDiff(
                            Constant.messages.getString("diff.diff.req.popup"), this, true);
        }
        return popupMenuDiffRequests;
    }

    // Lazy accessor for the "diff responses" popup item.
    private PopupMenuDiff getPopupMenuDiffResponses() {
        if (popupMenuDiffResponses == null) {
            popupMenuDiffResponses =
                    new PopupMenuDiff(
                            Constant.messages.getString("diff.diff.resp.popup"), this, false);
        }
        return popupMenuDiffResponses;
    }

    @Override
    public String getAuthor() {
        return Constant.ZAP_TEAM;
    }

    // Splits str on '\n' and appends each line to list.
    private void stringToList(String str, List<String> list) {
        for (String s : str.split("\n")) {
            list.add(s);
        }
    }

    // Lazy accessor for the reusable diff dialog.
    private DiffDialog getDiffDialog() {
        if (diffDialog == null) {
            diffDialog = new DiffDialog(getView().getMainFrame(), false);
        }
        return diffDialog;
    }

    /**
     * Populates and shows the diff dialog for the given pair of messages.
     * No-op if either message is null or the dialog is already visible.
     *
     * @param msg1 message shown in the left pane
     * @param msg2 message shown in the right pane
     * @param request true to diff the requests, false to diff the responses
     */
    public void showDiffDialog(HttpMessage msg1, HttpMessage msg2, boolean request)
            throws Exception {
        /*
         * This _is_ fairly nasty ;)
         * This method uses 2 different classes/projects to work out the diffs.
         * It uses diffutils to get the differing lines, and then diff_match_patch to identify the
         * diffs in the pairs of lines.
         * Be delighted if anyone can implement a cleaner option ;)
         */
        if (msg1 == null || msg2 == null) {
            return;
        }
        DiffDialog diffDialog = this.getDiffDialog();
        if (diffDialog.isVisible()) {
            return;
        }
        diffDialog.clearPanels();

        // Flatten header + body of each message into per-line lists.
        List<String> msgList1 = new ArrayList<String>();
        List<String> msgList2 = new ArrayList<String>();

        if (request) {
            stringToList(msg1.getRequestHeader().toString(), msgList1);
            stringToList(msg1.getRequestBody().toString(), msgList1);
            stringToList(msg2.getRequestHeader().toString(), msgList2);
            stringToList(msg2.getRequestBody().toString(), msgList2);
        } else {
            stringToList(msg1.getResponseHeader().toString(), msgList1);
            stringToList(msg1.getResponseBody().toString(), msgList1);
            stringToList(msg2.getResponseHeader().toString(), msgList2);
            stringToList(msg2.getResponseBody().toString(), msgList2);
        }

        Builder builder = new ZapDiffRowGenerator.Builder();
        ZapDiffRowGenerator drg = builder.build();
        List<DiffRow> res = drg.generateDiffRows(msgList1, msgList2);

        // Independent line counters for the left and right panes; only lines
        // actually present on a side advance its counter.
        int leftLine = 0;
        int rightLine = 0;

        for (DiffRow dr : res) {
            diff_match_patch dmp = new diff_match_patch();
            switch (dr.getTag()) {
                case CHANGE:
                    // Line exists on both sides but differs: emit line-number
                    // prefixes, then interleave character-level diff segments.
                    if (dr.getOldLine().length() > 0) {
                        diffDialog.appendLeftText(leftLine + " : ", true);
                    }
                    if (dr.getNewLine().length() > 0) {
                        diffDialog.appendRightText(rightLine + " : ", true);
                    }

                    /*
                     * Apply the highlighters after adding all the text.
                     * Bit nasty, but otherwise when you insert test if moves the end of the highlighter
                     * so everything is highlighted.
                     */
                    List<int[]> leftHighlighters = new ArrayList<int[]>();
                    List<int[]> rightHighlighters = new ArrayList<int[]>();

                    LinkedList<Diff> diffs = dmp.diff_main(dr.getOldLine(), dr.getNewLine());
                    for (Diff diff : diffs) {
                        int end = 0;
                        switch (diff.operation) {
                            case EQUAL:
                                diffDialog.appendLeftText(diff.text, false);
                                diffDialog.appendRightText(diff.text, false);
                                break;
                            case DELETE:
                                end = diffDialog.appendLeftText(diff.text, false);
                                leftHighlighters.add(new int[] {end - diff.text.length(), end});
                                break;
                            case INSERT:
                                end = diffDialog.appendRightText(diff.text, false);
                                rightHighlighters.add(new int[] {end - diff.text.length(), end});
                                break;
                        }
                    }

                    // These spaces prevent the next lines from moving any highlights at the end of
                    // the line
                    diffDialog.appendLeftText(" ", false);
                    diffDialog.appendRightText(" ", false);

                    for (int[] hl : leftHighlighters) {
                        diffDialog.highlightLeftText(hl[0], hl[1]);
                    }
                    for (int[] hl : rightHighlighters) {
                        diffDialog.highlightRightText(hl[0], hl[1]);
                    }

                    if (dr.getOldLine().length() > 0) {
                        leftLine++;
                    }
                    if (dr.getNewLine().length() > 0) {
                        rightLine++;
                    }
                    break;
                case EQUAL:
                    // Identical line: print unhighlighted on both sides.
                    diffDialog.appendLeftText(leftLine + " : ", false);
                    diffDialog.appendRightText(rightLine + " : ", false);
                    diffDialog.appendLeftText(dr.getOldLine(), (dr.getTag() != DiffRow.Tag.EQUAL));
                    diffDialog.appendRightText(dr.getNewLine(), (dr.getTag() != DiffRow.Tag.EQUAL));
                    leftLine++;
                    rightLine++;
                    break;
                case DELETE:
                    // Line only in msg1: left pane only, highlighted.
                    diffDialog.appendLeftText(leftLine + " : ", (dr.getTag() != DiffRow.Tag.EQUAL));
                    diffDialog.appendLeftText(dr.getOldLine(), (dr.getTag() != DiffRow.Tag.EQUAL));
                    leftLine++;
                    break;
                case INSERT:
                    // Line only in msg2: right pane only, highlighted.
                    diffDialog.appendRightText(
                            rightLine + " : ", (dr.getTag() != DiffRow.Tag.EQUAL));
                    diffDialog.appendRightText(dr.getNewLine(), (dr.getTag() != DiffRow.Tag.EQUAL));
                    rightLine++;
                    break;
            }
            diffDialog.appendLeftText("\n", false);
            diffDialog.appendRightText("\n", false);
        }

        diffDialog.setLeftHeader(msg1.getRequestHeader().getURI().toString());
        diffDialog.setRightHeader(msg2.getRequestHeader().getURI().toString());
        diffDialog.setVisible(true);

        // TODO scroll to first diff - initial attempts to do this have failed..
    }
}
package com.eegeo.menu;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.eegeo.tags.TagResources;
import com.eegeo.entrypointinfrastructure.MainActivity;
import com.eegeo.mobileexampleapp.R;

import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.BaseExpandableListAdapter;
import android.widget.ImageView;
import android.widget.RadioGroup.LayoutParams;
import android.widget.TextView;

/**
 * Adapter backing the app's two-level expandable menu list.
 *
 * Group and child rows are built from JSON-serialised {@link MenuItemData}
 * entries supplied via {@link #setData}, and expand/collapse transitions are
 * driven through a {@link MenuListAnimationHandler}.
 */
public class MenuExpandableListAdapter extends BaseExpandableListAdapter
{
    private MainActivity m_context;
    private List<MenuItemData> m_headerData;
    private HashMap<String, List<MenuItemData>> m_childData;

    MenuExpandableListView m_expandableList;
    MenuListAnimationHandler m_menuListAnimationHandler;

    // IH: The off-the-shelf view caching doesn't play well with animators.
    // Once a view has started animating, it seems to mark it good for reuse.
    // Workaround by having our own view caches.
    private HashMap<String, View> m_headerViewCache;
    private HashMap<String, View> m_childrenViewCache;

    // Index of the currently expanded group, or -1 when no group is expanded.
    int m_expandedGroupIndex = -1;

    private LayoutInflater m_inflater;
    final private int m_groupViewId;
    final private int m_childViewId;
    final private int m_childViewWithDetailsId;

    boolean m_isAnimating = false;

    /**
     * @param context                 activity used for inflation and resource lookup
     * @param expandableList          the list view this adapter populates
     * @param menuListAnimationHandler drives row and arrow animations
     * @param groupViewId             layout resource for group (header) rows
     * @param childViewId             layout resource for plain child rows
     * @param childViewWithDetailsId  layout resource for child rows with a details line
     */
    public MenuExpandableListAdapter(MainActivity context,
                                     MenuExpandableListView expandableList,
                                     MenuListAnimationHandler menuListAnimationHandler,
                                     final int groupViewId,
                                     final int childViewId,
                                     final int childViewWithDetailsId)
    {
        m_context = context;
        m_expandableList = expandableList;
        m_menuListAnimationHandler = menuListAnimationHandler;
        m_headerData = new ArrayList<MenuItemData>();
        m_childData = new HashMap<String, List<MenuItemData>>();
        m_inflater = LayoutInflater.from(m_context);
        m_groupViewId = groupViewId;
        m_childViewId = childViewId;
        m_childViewWithDetailsId = childViewWithDetailsId;

        m_headerViewCache = new HashMap<String, View>();
        m_childrenViewCache = new HashMap<String, View>();
    }

    public boolean isAnimating()
    {
        return m_isAnimating;
    }

    /** Drops all menu data and both row-view caches. */
    public void clearData()
    {
        m_headerData.clear();
        m_childData.clear();

        m_headerViewCache.clear();
        m_childrenViewCache.clear();
    }

    /**
     * Replaces the adapter contents and notifies observers.
     *
     * @param headerData JSON-serialised group items, in display order
     * @param childData  map from group text to that group's JSON-serialised children
     */
    public void setData(final List<String> headerData, final HashMap<String, List<String>> childData)
    {
        clearData();

        for (String serialisedJson : headerData)
        {
            m_headerData.add(MenuItemData.fromJson(serialisedJson));
        }

        for (Map.Entry<String, List<String>> entry : childData.entrySet())
        {
            List<MenuItemData> items = new ArrayList<MenuItemData>();
            for (String jsonString : entry.getValue())
            {
                items.add(MenuItemData.fromJson(jsonString));
            }
            m_childData.put(entry.getKey(), items);
        }

        notifyDataSetChanged();
    }

    @Override
    public Object getChild(int groupPosition, int childPosition)
    {
        String key = m_headerData.get(groupPosition).getText();
        return m_childData.get(key).get(childPosition);
    }

    @Override
    public long getChildId(int groupPosition, int childPosition)
    {
        return childPosition;
    }

    @Override
    public View getChildView(int groupPosition, int childPosition, boolean isLastChild,
                             View convertView, ViewGroup parent)
    {
        View itemView;

        // Cache key is group text + child index (mirrors triggerAnimationsOnChildViews).
        String key = m_headerData.get(groupPosition).getText() + Integer.toString(childPosition);

        if (m_childrenViewCache.containsKey(key))
        {
            itemView = m_childrenViewCache.get(key);
        }
        else
        {
            MenuItemData menuItemData = (MenuItemData) getChild(groupPosition, childPosition);
            final int viewToInflate = menuItemData.hasDetails() ? m_childViewWithDetailsId : m_childViewId;
            itemView = inflateView(viewToInflate, menuItemData);

            // IH: In order for animator to operate, layout params can't be null. So explicitly assign here.
            // If this isn't done, layout param setting is deferred until view is rendered, meaning animator
            // will attempt to operate on null value
            AbsListView.LayoutParams viewLayoutParams =
                new AbsListView.LayoutParams(LayoutParams.MATCH_PARENT, 1);
            itemView.setLayoutParams(viewLayoutParams);
            itemView.setScaleY(0.0f);

            MenuListItemAnimationListener listener =
                new MenuListItemAnimationListener(groupPosition, true, this);
            m_menuListAnimationHandler.animateItemView(itemView, true, listener);
            m_childrenViewCache.put(key, itemView);
        }

        jumpToTopOfMenuIfAnimating();

        return itemView;
    }

    // Keeps the first group in view while rows are animating open/closed.
    private void jumpToTopOfMenuIfAnimating()
    {
        if (m_isAnimating)
        {
            m_expandableList.setSelectedGroup(0);
        }
    }

    @Override
    public int getChildrenCount(int groupPosition)
    {
        String key = m_headerData.get(groupPosition).getText();
        List<MenuItemData> children = m_childData.get(key);
        // BUGFIX: a group with no entry in m_childData previously caused an NPE here
        // (and via tryAnimateGroupExpand/Collapse). Such groups simply have no children.
        return children == null ? 0 : children.size();
    }

    @Override
    public Object getGroup(int groupPosition)
    {
        return m_headerData.get(groupPosition);
    }

    @Override
    public int getGroupCount()
    {
        return m_headerData.size();
    }

    @Override
    public long getGroupId(int groupPosition)
    {
        return groupPosition;
    }

    @Override
    public View getGroupView(int groupPosition, boolean isExpanded, View convertView, ViewGroup parent)
    {
        View itemView;
        MenuItemData menuItemData = (MenuItemData) getGroup(groupPosition);
        String key = menuItemData.getText();

        if (m_headerViewCache.containsKey(key))
        {
            itemView = m_headerViewCache.get(key);
        }
        else
        {
            itemView = inflateView(m_groupViewId, menuItemData);
            m_headerViewCache.put(key, itemView);
        }

        // Only show the expand arrow for groups that actually have children.
        List<MenuItemData> children = m_childData.get(menuItemData.getText());
        boolean isExpandable = children != null && children.size() > 0;
        View arrowView = itemView.findViewById(R.id.menu_list_openable_shape);
        if (arrowView != null)
        {
            arrowView.setVisibility(isExpandable ? View.VISIBLE : View.GONE);
        }

        return itemView;
    }

    @Override
    public boolean hasStableIds()
    {
        return false;
    }

    @Override
    public boolean isChildSelectable(int groupPosition, int childPosition)
    {
        return true;
    }

    @Override
    public void onGroupExpanded(int groupPosition)
    {
        markOtherGroupItemsForCollapse(groupPosition);
        m_expandedGroupIndex = groupPosition;
    }

    /**
     * Starts the expand animation for a group.
     *
     * @return false if the group has no children (nothing to animate)
     */
    public boolean tryAnimateGroupExpand(int groupToExpand)
    {
        final int numberOfChildrenInGroup = getChildrenCount(groupToExpand);
        if (numberOfChildrenInGroup == 0)
        {
            return false;
        }

        m_isAnimating = true;
        triggerGroupAnimations(groupToExpand, true);
        return true;
    }

    // If a different group is currently expanded, animate it closed first.
    private void markOtherGroupItemsForCollapse(int groupToExpand)
    {
        if (m_expandedGroupIndex != -1 && m_expandedGroupIndex != groupToExpand)
        {
            triggerGroupAnimations(m_expandedGroupIndex, false);
        }
    }

    /**
     * Starts the collapse animation for a group.
     *
     * @return false if the group has no children (nothing to animate)
     */
    public boolean tryAnimateGroupCollapse(int groupToCollapse)
    {
        m_expandedGroupIndex = -1;
        final int numberOfChildrenInGroup = getChildrenCount(groupToCollapse);
        if (numberOfChildrenInGroup == 0)
        {
            return false;
        }

        m_isAnimating = true;
        triggerGroupAnimations(groupToCollapse, false);
        return true;
    }

    /** Callback from {@link MenuListItemAnimationListener} when a row animation ends. */
    public void onFinishedItemAnimation(int groupPosition, boolean didCollapse)
    {
        m_isAnimating = false;
        if (didCollapse && m_expandableList.isGroupExpanded(groupPosition))
        {
            // Defer the actual collapse until the animation has visually completed.
            m_expandableList.delayCollapseGroup(groupPosition);
        }
    }

    private void triggerGroupAnimations(int groupIndex, boolean isExpanding)
    {
        triggerAnimationOnArrowView(groupIndex, isExpanding);
        triggerAnimationsOnChildViews(groupIndex, isExpanding);
    }

    private void triggerAnimationsOnChildViews(int groupIndex, boolean isExpanding)
    {
        final int numberOfChildrenInGroup = getChildrenCount(groupIndex);
        String groupKeyPortion = m_headerData.get(groupIndex).getText();
        for (int i = 0; i < numberOfChildrenInGroup; ++i)
        {
            // Must match the cache key scheme used in getChildView.
            String combinedKey = groupKeyPortion + Integer.toString(i);
            if (m_childrenViewCache.containsKey(combinedKey))
            {
                MenuListItemAnimationListener listener =
                    new MenuListItemAnimationListener(groupIndex, isExpanding, this);
                m_menuListAnimationHandler.animateItemView(
                    m_childrenViewCache.get(combinedKey), isExpanding, listener);
            }
        }
    }

    public void triggerAnimationOnArrowView(int groupIndex, boolean rotateCounterClockwise)
    {
        String groupKeyPortion = m_headerData.get(groupIndex).getText();
        if (m_headerViewCache.containsKey(groupKeyPortion))
        {
            View itemView = m_headerViewCache.get(groupKeyPortion);
            View arrowView = itemView.findViewById(R.id.menu_list_openable_shape);
            m_menuListAnimationHandler.animateHeaderArrow(arrowView, rotateCounterClockwise);
        }
    }

    // Inflates a row layout and populates its text, optional details and optional icon.
    private View inflateView(int viewId, MenuItemData itemData)
    {
        View itemView = m_inflater.inflate(viewId, null);

        TextView itemText = (TextView) itemView.findViewById(R.id.menu_list_item_name);
        itemText.setText(itemData.getText());

        if (itemData.hasDetails())
        {
            TextView detailsText = (TextView) itemView.findViewById(R.id.menu_list_item_detail);
            detailsText.setText(itemData.getDetails());
        }

        ImageView itemIcon = (ImageView) itemView.findViewById(R.id.menu_list_item_icon);
        if (itemIcon != null)
        {
            itemIcon.setImageResource(TagResources.getIconForResourceName(m_context, itemData.getIcon()));
        }

        return itemView;
    }
}
/******************************************************************************* "FreePastry" Peer-to-Peer Application Development Substrate Copyright 2002-2007, Rice University. Copyright 2006-2007, Max Planck Institute for Software Systems. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Rice University (RICE), Max Planck Institute for Software Systems (MPI-SWS) nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. This software is provided by RICE, MPI-SWS and the contributors on an "as is" basis, without any representations or warranties of any kind, express or implied including, but not limited to, representations or warranties of non-infringement, merchantability or fitness for a particular purpose. In no event shall RICE, MPI-SWS or contributors be liable for any direct, indirect, incidental, special, exemplary, or consequential damages (including, but not limited to, procurement of substitute goods or services; loss of use, data, or profits; or business interruption) however caused and on any theory of liability, whether in contract, strict liability, or tort (including negligence or otherwise) arising in any way out of the use of this software, even if advised of the possibility of such damage. 
*******************************************************************************/
package rice.pastry.socket.internet;

import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.BindException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;

import org.mpisws.p2p.transport.multiaddress.MultiInetSocketAddress;
import org.mpisws.p2p.transport.networkinfo.CantVerifyConnectivityException;
import org.mpisws.p2p.transport.networkinfo.ConnectivityResult;

import rice.Continuation;
import rice.environment.Environment;
import rice.environment.logging.Logger;
import rice.environment.params.Parameters;
import rice.p2p.commonapi.Cancellable;
import rice.pastry.Id;
import rice.pastry.NodeIdFactory;
import rice.pastry.PastryNode;
import rice.pastry.socket.nat.CantFindFirewallException;
import rice.pastry.socket.nat.NATHandler;
import rice.pastry.socket.nat.StubNATHandler;
import rice.pastry.socket.nat.connectivityverifiier.ConnectivityVerifier;
import rice.pastry.socket.nat.connectivityverifiier.ConnectivityVerifierImpl;
import rice.pastry.socket.nat.rendezvous.RendezvousSocketPastryNodeFactory;
import rice.selector.TimerTask;

/**
 * This Factory is designed for real deployments (with NATs).
 *
 * Optimized for the following use cases (in order)
 *  1) Internet Routable, No firewall
 *  2) Internet Routable, Firewall (not NAT)
 *  3) NAT: User Configured Port Forwarding
 *  4) NAT: UPnP
 *  5) NAT: No port forwarding
 *
 * @author Jeff Hoye
 */
public class InternetPastryNodeFactory extends RendezvousSocketPastryNodeFactory {

  /**
   * NAT policy variables
   */
  public static final int ALWAYS = 1;
  public static final int PREFIX_MATCH = 2;
  public static final int NEVER = 3;
  /**
   * Don't check bootstrap nodes
   */
  public static final int BOOT = 4;

  // nat_state_policy values: what to do when the firewall already maps the port.
  public static final int OVERWRITE = 1;
  public static final int USE_DIFFERENT_PORT = 2;
  public static final int FAIL = 3;
  public static final int RENDEZVOUS = 4;

  NATHandler natHandler;

  ConnectivityVerifier connectivityVerifier;

  // Bootstrap nodes' Internet-routable addresses, used to probe connectivity.
  Collection<InetSocketAddress> probeAddresses;

  /**
   * The ordered addresses of the nat propagation from most external to most internal
   *
   * Null if localAddress is Internet routable
   * 1 value per NAT
   */
  InetAddress[] externalAddresses;

  public InternetPastryNodeFactory(NodeIdFactory nf, int startPort, Environment env)
      throws IOException {
    this(nf, null, startPort, env, null, null, null);
  }

  /**
   * May block for more than a second to determine network information.
   *
   * @param nf can be null, but must call newNode() with a NodeId of the new PastryNode
   * @param bindAddress the NIC to use (null will choose one that can access the Internet)
   * @param startPort the port of the first created node, will be incremented for additional nodes,
   *          can be specified on a per-Node basis by calling newNode() with a MultiInetSocketAddress
   * @param env can't be null
   * @param handler will attempt to use SBBI's UPnP library if null, unless blocked by deleting the
   *          param "nat_handler_class"
   * @param probeAddresses a list of bootstrap nodes' Internet routable addresses, used to establish
   *          firewall information
   * @param externalAddresses ordered addresses of the nat propagation from most external to most
   *          internal, null will use natHandler and probeAddresses to determine this
   */
  public InternetPastryNodeFactory(NodeIdFactory nf, InetAddress bindAddress,
      int startPort, Environment env, NATHandler handler,
      Collection<InetSocketAddress> probeAddresses,
      InetAddress[] externalAddresses) throws IOException {
    super(nf, bindAddress, startPort, env, false);
    Parameters params = env.getParameters();

    // get a natHandler
    this.natHandler = handler;
    if (natHandler == null) {
      this.natHandler = getDefaultNatHandler(env, this.localAddress);
    }

    this.probeAddresses = probeAddresses;

    this.externalAddresses = externalAddresses;
    if (params.contains("external_address")) {
      // BUGFIX: write the field, not the constructor parameter. The previous code
      // assigned to the parameter "externalAddresses", so a configured
      // "external_address" param was computed and then silently discarded.
      this.externalAddresses = new InetAddress[1];
      this.externalAddresses[0] = params.getInetSocketAddress("external_address").getAddress();
    }

    this.connectivityVerifier = new ConnectivityVerifierImpl(this);

    // sets/verifies externalAddress
    findExternalAddressIfNecessary(this.localAddress); // blocking call
  }

  /**
   * Return a NATHandler.  Attempts to construct the class named by the
   * "nat_handler_class" param via reflection; falls back to a StubNATHandler when
   * the class (or the UPnP libraries it needs) is unavailable.
   *
   * @param env
   * @param localAddress the address of the interface we should search for a NAT on
   * @return never null
   */
  @SuppressWarnings("unchecked")
  protected NATHandler getDefaultNatHandler(Environment env, InetAddress localAddress) {
    Parameters params = env.getParameters();

    if (params.contains("nat_handler_class")) {
      try {
        Class natHandlerClass = Class.forName(params.getString("nat_handler_class"));
        Class[] args = {Environment.class, InetAddress.class};
        Constructor constructor = natHandlerClass.getConstructor(args);
        Object[] foo = {env, localAddress};
        return (NATHandler) constructor.newInstance(foo);
      } catch (ClassNotFoundException e) {
        if (logger.level <= Logger.INFO) logger.log("Didn't find UPnP libs, skipping UPnP");
        return new StubNATHandler(env, localAddress);
      } catch (NoClassDefFoundError e) {
        if (logger.level <= Logger.INFO) logger.log("Didn't find UPnP libs, skipping UPnP");
        return new StubNATHandler(env, localAddress);
      } catch (InvocationTargetException e) {
        if (logger.level <= Logger.INFO) logger.log("Didn't find UPnP libs, skipping UPnP");
        return new StubNATHandler(env, localAddress);
      } catch (Exception e) {
        // misconfiguration (bad class name, wrong constructor, ...) is fatal
        if (logger.level <= Logger.WARNING) logger.logException("Error constructing NATHandler.", e);
        throw new RuntimeException(e);
      }
    } else {
      return new StubNATHandler(env, localAddress);
    }
  }

  /**
   * Decides, per the "nat_search_policy" param, whether we need to search for a
   * NAT/firewall in front of the given local address.
   */
  protected boolean shouldFindExternalAddress(InetAddress address) {
    switch (getFireWallPolicyVariable("nat_search_policy")) {
      case NEVER:
        return false;
      case PREFIX_MATCH:
        // only search when the local address is not Internet routable
        return !isInternetRoutablePrefix(address);
      case ALWAYS:
        return true;
    }
    return true;
  }

  /**
   * Sets/Verifies externalAddresses
   *
   * Return true if all is well.
   * Return false if a firewall should be found but couldn't.
   *
   * Throws an exception if firewall was found and disagrees with the existing
   * externalAddress (if it's not null).
   *
   * @param address
   * @return
   * @throws IOException
   */
  protected boolean findExternalAddressIfNecessary(InetAddress address) throws IOException {
    if (!shouldFindExternalAddress(address)) return true;

    try {
      natHandler.findFireWall(address); // warning, this is blocking...
    } catch (CantFindFirewallException cffe) {
      if (logger.level <= Logger.INFO) logger.log(
          "Can't find firewall, continuing. For better performance, enable UPnP. Will try to verify if user configured a port forward rule..." + cffe);
      // ignore
      return false;
    } catch (IOException ioe) {
      if (logger.level <= Logger.WARNING) logger.log(ioe.toString());
      // ignore
      return false;
    }

    if (this.externalAddresses == null) {
      this.externalAddresses = new InetAddress[1];
      this.externalAddresses[0] = natHandler.getFireWallExternalAddress();
      if (this.externalAddresses[0] == null) {
        // couldn't find firewall
        return false;
      } else {
        // all is well
        return true;
      }
    } else {
      if (externalAddresses[0].equals(natHandler.getFireWallExternalAddress())) {
        // the firewall says the same thing as our external address list
        return true;
      } else {
        // the firewall disagrees with the existing external address list
        throw new IOException("Firewall disagrees with the externalAddresses. externalAddresses:"
            + externalAddresses[0] + " firewall:" + natHandler.getFireWallExternalAddress());
      }
    }
  }

  /**
   * This is where the action takes place.
   *
   * 1) Make sure the proxyAddress is valid and good
   *   a) add the external address/port if needed
   *
   * 2) Try to configure the firewall
   *
   * 3) call newNodeSelectorHelper
   */
  @Override
  protected void newNodeSelector(final Id nodeId,
      final MultiInetSocketAddress proxyAddress,
      final Continuation<PastryNode, IOException> deliverResultToMe,
      Map<String, Object> initialVars) {
    // make sure the innermost address is valid
    if (!proxyAddress.getInnermostAddress().getAddress().equals(this.localAddress)) {
      throw new RuntimeException(
          "proxyAddress.innermostAddress() must be the local bind address. proxyAddress:"
              + proxyAddress + " bindAddress:" + this.localAddress);
    }

    // may be running on the LAN, don't bother with anything
    if (!shouldFindExternalAddress(proxyAddress.getInnermostAddress().getAddress())) {
      verifyConnectivityThenMakeNewNode(nodeId, proxyAddress, deliverResultToMe);
      return;
    }

    // we know we are in a NAT type situation now
    if (proxyAddress.getNumAddresses() > 2) {
      throw new RuntimeException(
          "this factory only supports 1 layer deep NAT configurations try setting nat_search_policy = never if you are sure that your NAT configuration is "
              + proxyAddress);
    }

    // we know that there are 1 or 2 addresses, and that the first is the localAddress
    if (proxyAddress.getNumAddresses() == 1) {
      // determine the pAddress
      findExternalAddress(nodeId, proxyAddress.getInnermostAddress(), deliverResultToMe);
    } else {
      openFirewallPort(nodeId, proxyAddress.getInnermostAddress(), deliverResultToMe,
          proxyAddress.getOutermostAddress().getAddress(),
          proxyAddress.getOutermostAddress().getPort());
    }
  }

  /**
   * Finds the external address, calls openFirewallPort()
   */
  protected void findExternalAddress(final Id nodeId,
      final InetSocketAddress bindAddress,
      final Continuation<PastryNode, IOException> deliverResultToMe) {
    // see if it's specified in the configuration
    if (environment.getParameters().contains("external_address")) {
      // get it from the param
      try {
        InetSocketAddress pAddress =
            environment.getParameters().getInetSocketAddress("external_address");
        openFirewallPort(nodeId, bindAddress, deliverResultToMe,
            pAddress.getAddress(), pAddress.getPort());
      } catch (UnknownHostException uhe) {
        deliverResultToMe.receiveException(uhe);
      }
    } else {
      // pull self from probeAddresses
      Collection<InetSocketAddress> myProbeAddresses = null;
      Collection<InetSocketAddress> nonInternetRoutable = null;
      if (this.probeAddresses != null) {
        myProbeAddresses = new ArrayList<InetSocketAddress>(probeAddresses);
        nonInternetRoutable = new ArrayList<InetSocketAddress>();
        while (myProbeAddresses.remove(bindAddress));

        // pull non-internet routable addresses
        Iterator<InetSocketAddress> i = myProbeAddresses.iterator();
        while (i.hasNext()) {
          InetSocketAddress foo = i.next();
          if (!isInternetRoutablePrefix(foo.getAddress())) {
            nonInternetRoutable.add(foo);
            i.remove();
          }
        }
      }

      if ((myProbeAddresses == null || myProbeAddresses.isEmpty())
          && (nonInternetRoutable != null && !nonInternetRoutable.isEmpty())) {
        // no routable probes, but NATted ones: ask them for routable nodes first
        findExternalNodes(nodeId, bindAddress, nonInternetRoutable, deliverResultToMe);
      } else {
        findExternalAddressHelper(nodeId, bindAddress, deliverResultToMe, myProbeAddresses);
      }
    }
  }

  /**
   * Probe the internalAddresses to get more externalAddresses, then call
   * findExternalAddressHelper
   */
  protected void findExternalNodes(final Id nodeId,
      final InetSocketAddress bindAddress,
      final Collection<InetSocketAddress> nonInternetRoutable,
      final Continuation<PastryNode, IOException> deliverResultToMe) {
    if (nonInternetRoutable == null || nonInternetRoutable.isEmpty()) {
      // BUGFIX: return after the fallback. Previously execution fell through and
      // also invoked connectivityVerifier.findExternalNodes() with an empty
      // collection, continuing node construction down two paths at once.
      findExternalAddressHelper(nodeId, bindAddress, deliverResultToMe, null);
      return;
    }
    connectivityVerifier.findExternalNodes(bindAddress, nonInternetRoutable,
        new Continuation<Collection<InetSocketAddress>, IOException>() {
          public void receiveResult(Collection<InetSocketAddress> result) {
            findExternalAddressHelper(nodeId, bindAddress, deliverResultToMe, result);
          }

          public void receiveException(IOException exception) {
            // BUGFIX: the old guard here could never fire on this path (the list is
            // non-empty by construction above), so the exception was swallowed and
            // deliverResultToMe was never invoked, hanging node construction.
            // Fall back to the helper without probe results instead.
            findExternalAddressHelper(nodeId, bindAddress, deliverResultToMe, null);
          }
        });
  }

  /**
   * Uses the probe addresses (if any) to discover our external address, verifying it
   * against any configured externalAddresses; otherwise falls back to the firewall's
   * reported external address.  Ends by calling openFirewallPort().
   */
  protected void findExternalAddressHelper(final Id nodeId,
      final InetSocketAddress bindAddress,
      final Continuation<PastryNode, IOException> deliverResultToMe,
      Collection<InetSocketAddress> myProbeAddresses) {
    // try the probeAddresses
    if (myProbeAddresses != null && !myProbeAddresses.isEmpty()) {
      connectivityVerifier.findExternalAddress(bindAddress, myProbeAddresses,
          new Continuation<InetAddress, IOException>() {
            public void receiveResult(InetAddress result) {
              if (externalAddresses != null) {
                if (!externalAddresses[0].equals(result)) {
                  deliverResultToMe.receiveException(new IOException("Probe address (" + result
                      + ") does not match specified externalAddress (" + externalAddresses[0] + ")."));
                  return;
                }
              }
              openFirewallPort(nodeId, bindAddress, deliverResultToMe, result, -1);
            }

            public void receiveException(IOException exception) {
              deliverResultToMe.receiveException(exception);
            }
          });
    } else {
      // try the firewall
      openFirewallPort(nodeId, bindAddress, deliverResultToMe,
          natHandler.getFireWallExternalAddress(), -1);
    }
  }

  /**
   * Attempt to open the firewall on the specified port
   * if it doesn't work, uses Rendezvous
   *
   * @param port the external firewall port to (attempt to) use, -1 to use anything
   */
  protected void openFirewallPort(final Id nodeId,
      final InetSocketAddress bindAddress,
      final Continuation<PastryNode, IOException> deliverResultToMe,
      InetAddress externalAddress, int requestedPort) {
    Parameters params = environment.getParameters();
    int firewallSearchTries = params.getInt("nat_find_port_max_tries");
    String firewallAppName = params.getString("nat_app_name");

    int port;
    if (requestedPort == -1) {
      port = bindAddress.getPort();
    } else {
      port = requestedPort;
    }

    /**
     * Set this to true to just give up and use Rendezvous
     */
    boolean rendezvous = false;

    try {
      // if we can talk to the firewall at all
      if (natHandler.getFireWallExternalAddress() == null) {
        rendezvous = true;
      } else {
        int availableFireWallPort = natHandler.findAvailableFireWallPort(
            bindAddress.getPort(), port, firewallSearchTries, firewallAppName);
        if (requestedPort == -1 || availableFireWallPort == port) {
          // success
          port = availableFireWallPort;
        } else {
          // decide how to handle this
          switch (getFireWallPolicyVariable("nat_state_policy")) {
            case OVERWRITE:
              break;
            case FAIL:
              // todo: would be useful to pass the app that is bound to that port
              deliverResultToMe.receiveException(new BindException(
                  "Firewall is already bound to the requested port:" + externalAddress + ":" + port));
              return;
            case RENDEZVOUS:
              rendezvous = true;
              break;
            case USE_DIFFERENT_PORT:
              port = availableFireWallPort;
              break;
          }
        }
      }

      if (rendezvous) {
        // this could go either way... since the connectivity check will fail, it'll
        // show up natted, but it used to be zero
        port = bindAddress.getPort();
      } else {
        natHandler.openFireWallPort(bindAddress.getPort(), port, firewallAppName);
      }
    } catch (IOException ioe) {
      // doesn't matter, can just rendezvous
      port = 0;
    }

    // if we found an externalAddress under any mechanism, use it, otherwise dont.
    MultiInetSocketAddress fullAddress;
    if (externalAddress == null) {
      fullAddress = new MultiInetSocketAddress(bindAddress);
    } else {
      fullAddress = new MultiInetSocketAddress(
          new InetSocketAddress(externalAddress, port), bindAddress);
    }
    verifyConnectivityThenMakeNewNode(nodeId, fullAddress, deliverResultToMe);
  }

  /**
   * Verifies the connectivity (if necessary), then calls super.newNodeSelector()
   *
   * if connectivity fails, then uses Rendezvous
   *
   * @param nodeId
   * @param proxyAddress
   * @param deliverResultToMe
   */
  protected void verifyConnectivityThenMakeNewNode(final Id nodeId,
      final MultiInetSocketAddress proxyAddress,
      final Continuation<PastryNode, IOException> deliverResultToMe) {
    if (proxyAddress.getOutermostAddress().getPort() < 1) {
      // no usable external port: mark firewalled and skip verification
      newNodeSelector(nodeId, proxyAddress, deliverResultToMe, null, true);
      return;
    }
    if (!shouldCheckConnectivity(proxyAddress, probeAddresses)) {
      newNodeSelector(nodeId, proxyAddress, deliverResultToMe, null, false);
      return;
    }

    final boolean[] timeout = new boolean[1];
    timeout[0] = false;
    final Cancellable[] cancelme = new Cancellable[1];
    final TimerTask timer = new TimerTask() {
      @Override
      public void run() {
        // verification took too long: assume firewalled
        timeout[0] = true;
        // clear up the bind address
        cancelme[0].cancel();
        // invoke to let the cancel succeed, seems to need to take a second sometimes
        environment.getSelectorManager().schedule(new TimerTask() {
          public void run() {
            newNodeSelector(nodeId, proxyAddress, deliverResultToMe, null, true);
          }
        }, 1000);
      }
    };
    environment.getSelectorManager().getTimer().schedule(timer, 10000);

    cancelme[0] = connectivityVerifier.verifyConnectivity(proxyAddress, probeAddresses,
        new ConnectivityResult() {
          boolean udpSuccess = false;
          boolean tcpSuccess = false;

          public void udpSuccess(InetSocketAddress from, Map<String, Object> options) {
            udpSuccess = true;
            complete();
          }

          public void tcpSuccess(InetSocketAddress from, Map<String, Object> options) {
            tcpSuccess = true;
            complete();
          }

          // only proceed once BOTH protocols verified and the timeout hasn't fired
          public void complete() {
            if (tcpSuccess && udpSuccess && !timeout[0]) {
              timer.cancel();
              newNodeSelector(nodeId, proxyAddress, deliverResultToMe, null, false);
            }
          }

          public void receiveException(Exception e) {
            timer.cancel();
            if (e instanceof CantVerifyConnectivityException) {
              // mark node firewalled if internal address matches the prefix,
              // otherwise not firewalled
              if (shouldFindExternalAddress(proxyAddress.getInnermostAddress().getAddress())) {
                newNodeSelector(nodeId, proxyAddress, deliverResultToMe, null, true);
              } else {
                newNodeSelector(nodeId, proxyAddress, deliverResultToMe, null, false);
              }
            } else {
              newNodeSelector(nodeId, proxyAddress, deliverResultToMe, null, true);
            }
          }
        });
  }

  /**
   * True when the address list has a single entry whose prefix is Internet routable.
   */
  protected boolean isInternetRoutable(MultiInetSocketAddress proxyAddress) {
    if (proxyAddress.getNumAddresses() == 1) {
      InetSocketAddress address = proxyAddress.getInnermostAddress();
      if (isInternetRoutablePrefix(address.getAddress())) {
        return true;
      }
    }
    return false;
  }

  /**
   * Maps a policy parameter string to one of the policy constants above.
   *
   * @throws RuntimeException for an unrecognized value
   */
  protected int getFireWallPolicyVariable(String key) {
    String val = environment.getParameters().getString(key);
    if (val.equalsIgnoreCase("prefix")) return PREFIX_MATCH;
    if (val.equalsIgnoreCase("change")) return USE_DIFFERENT_PORT;
    if (val.equalsIgnoreCase("never")) return NEVER;
    if (val.equalsIgnoreCase("overwrite")) return OVERWRITE;
    if (val.equalsIgnoreCase("always")) return ALWAYS;
    if (val.equalsIgnoreCase("boot")) return BOOT;
    if (val.equalsIgnoreCase("fail")) return FAIL;
    if (val.equalsIgnoreCase("rendezvous")) return RENDEZVOUS;
    throw new RuntimeException("Unknown value " + val + " for " + key);
  }

  /**
   * Decides, per the "firewall_test_policy" param, whether to probe our own
   * connectivity before creating the node.
   *
   * @param proxyAddress
   * @param bootstraps
   * @return
   */
  protected boolean shouldCheckConnectivity(MultiInetSocketAddress proxyAddress,
      Collection<InetSocketAddress> bootstraps) {
    if (bootstraps == null) return false;
    switch (getFireWallPolicyVariable("firewall_test_policy")) {
      case NEVER:
        return false;
      case BOOT:
        // don't do it if we're the bootstrap node
        if (!bootstraps.contains(proxyAddress.getOutermostAddress())) return true;
        // deliberate fall-through to PREFIX_MATCH when we ARE a bootstrap node
      case PREFIX_MATCH:
        return !isInternetRoutable(proxyAddress);
      case ALWAYS:
        return true;
    } // switch
    return true; // will probably never happen
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.bookkeeper.test;

import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import org.apache.bookkeeper.bookie.Bookie;
import org.apache.bookkeeper.bookie.BookieException;
import org.apache.bookkeeper.client.BookKeeperTestClient;
import org.apache.bookkeeper.conf.AbstractConfiguration;
import org.apache.bookkeeper.conf.ClientConfiguration;
import org.apache.bookkeeper.conf.ServerConfiguration;
import org.apache.bookkeeper.conf.TestBKConfiguration;
import org.apache.bookkeeper.metastore.InMemoryMetaStore;
import org.apache.bookkeeper.net.BookieSocketAddress;
import org.apache.bookkeeper.proto.BookieServer;
import org.apache.bookkeeper.replication.AutoRecoveryMain;
import org.apache.bookkeeper.replication.Auditor;
import org.apache.bookkeeper.replication.ReplicationException.CompatibilityException;
import org.apache.bookkeeper.replication.ReplicationException.UnavailableException;
import org.apache.bookkeeper.util.IOUtils;
import org.apache.commons.io.FileUtils;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import org.junit.After;
import org.junit.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base class that runs several bookie servers (plus an in-process ZooKeeper)
 * for testing. Subclasses get a running cluster of {@link #numBookies} bookies
 * before each test and a clean shutdown afterwards. Helper methods allow
 * killing, sleeping and restarting individual bookies, and optionally running
 * one {@link AutoRecoveryMain} process per bookie when auto-recovery is
 * enabled via {@link #setAutoRecoveryEnabled(boolean)}.
 */
public abstract class BookKeeperClusterTestCase {

    static final Logger LOG = LoggerFactory.getLogger(BookKeeperClusterTestCase.class);

    // ZooKeeper related variables
    protected ZooKeeperUtil zkUtil = new ZooKeeperUtil();
    protected ZooKeeper zkc;

    // BookKeeper related variables
    protected List<File> tmpDirs = new LinkedList<File>();
    protected List<BookieServer> bs = new LinkedList<BookieServer>();
    // bsConfs is kept index-aligned with bs: bsConfs.get(i) configures bs.get(i).
    protected List<ServerConfiguration> bsConfs = new LinkedList<ServerConfiguration>();
    protected int numBookies;
    protected BookKeeperTestClient bkc;

    protected ServerConfiguration baseConf = TestBKConfiguration.newServerConfiguration();
    protected ClientConfiguration baseClientConf = new ClientConfiguration();

    // One auto-recovery daemon per bookie server, populated only when
    // isAutoRecoveryEnabled is true at the time the bookie is started.
    private Map<BookieServer, AutoRecoveryMain> autoRecoveryProcesses =
            new HashMap<BookieServer, AutoRecoveryMain>();

    private boolean isAutoRecoveryEnabled;

    /**
     * @param numBookies number of bookie servers to start for each test
     */
    public BookKeeperClusterTestCase(int numBookies) {
        this.numBookies = numBookies;
        baseConf.setAllowLoopback(true);
    }

    @Before
    public void setUp() throws Exception {
        LOG.info("Setting up test {}", getClass());
        InMemoryMetaStore.reset();
        setMetastoreImplClass(baseConf);
        setMetastoreImplClass(baseClientConf);

        try {
            // start zookeeper service
            startZKCluster();
            // start bookkeeper service
            startBKCluster();
        } catch (Exception e) {
            LOG.error("Error setting up", e);
            throw e;
        }
    }

    @After
    public void tearDown() throws Exception {
        LOG.info("TearDown");
        // stop bookkeeper service
        stopBKCluster();
        // stop zookeeper service
        stopZKCluster();
        // cleanup temp dirs
        cleanupTempDirs();
        LOG.info("Tearing down test {}", getClass());
    }

    /**
     * Creates a temporary directory that will be deleted in
     * {@link #cleanupTempDirs()} during teardown.
     */
    protected File createTempDir(String prefix, String suffix) throws IOException {
        File dir = IOUtils.createTempDir(prefix, suffix);
        tmpDirs.add(dir);
        return dir;
    }

    /**
     * Start zookeeper cluster
     *
     * @throws Exception
     */
    protected void startZKCluster() throws Exception {
        zkUtil.startServer();
        zkc = zkUtil.getZooKeeperClient();
    }

    /**
     * Stop zookeeper cluster
     *
     * @throws Exception
     */
    protected void stopZKCluster() throws Exception {
        zkUtil.killServer();
    }

    /**
     * Start cluster. Also, starts the auto recovery process for each bookie, if
     * isAutoRecoveryEnabled is true.
     *
     * @throws Exception
     */
    protected void startBKCluster() throws Exception {
        baseClientConf.setZkServers(zkUtil.getZooKeeperConnectString());
        if (numBookies > 0) {
            bkc = new BookKeeperTestClient(baseClientConf);
        }

        // Create Bookie Servers (B1, B2, B3)
        for (int i = 0; i < numBookies; i++) {
            startNewBookie();
        }
    }

    /**
     * Stop cluster. Also, stops all the auto recovery processes for the bookie
     * cluster, if isAutoRecoveryEnabled is true.
     *
     * @throws Exception
     */
    protected void stopBKCluster() throws Exception {
        if (bkc != null) {
            bkc.close();
        }

        for (BookieServer server : bs) {
            server.shutdown();
            AutoRecoveryMain autoRecovery = autoRecoveryProcesses.get(server);
            if (autoRecovery != null && isAutoRecoveryEnabled()) {
                autoRecovery.shutdown();
                LOG.debug("Shutdown auto recovery for bookieserver:"
                        + server.getLocalAddress());
            }
        }
        bs.clear();
    }

    protected void cleanupTempDirs() throws Exception {
        for (File f : tmpDirs) {
            FileUtils.deleteDirectory(f);
        }
    }

    /**
     * Builds a fresh server configuration on a free port, using a new temp
     * directory for both journal and ledger storage.
     */
    protected ServerConfiguration newServerConfiguration() throws Exception {
        File f = createTempDir("bookie", "test");

        int port = PortManager.nextFreePort();
        return newServerConfiguration(port, zkUtil.getZooKeeperConnectString(), f,
                new File[] { f });
    }

    protected ClientConfiguration newClientConfiguration() {
        return new ClientConfiguration(baseConf);
    }

    protected ServerConfiguration newServerConfiguration(int port, String zkServers,
            File journalDir, File[] ledgerDirs) {
        ServerConfiguration conf = new ServerConfiguration(baseConf);
        conf.setBookiePort(port);
        conf.setZkServers(zkServers);
        conf.setJournalDirName(journalDir.getPath());
        String[] ledgerDirNames = new String[ledgerDirs.length];
        for (int i = 0; i < ledgerDirs.length; i++) {
            ledgerDirNames[i] = ledgerDirs[i].getPath();
        }
        conf.setLedgerDirNames(ledgerDirNames);
        return conf;
    }

    /**
     * Get bookie address for bookie at index
     */
    public BookieSocketAddress getBookie(int index) throws Exception {
        if (bs.size() <= index || index < 0) {
            throw new IllegalArgumentException("Invalid index, there are only " + bs.size()
                    + " bookies. Asked for " + index);
        }
        return bs.get(index).getLocalAddress();
    }

    /**
     * Get bookie configuration for bookie, or null if no bookie with the given
     * address is currently running.
     */
    public ServerConfiguration getBkConf(BookieSocketAddress addr) throws Exception {
        int bkIndex = 0;
        for (BookieServer server : bs) {
            if (server.getLocalAddress().equals(addr)) {
                break;
            }
            ++bkIndex;
        }
        if (bkIndex < bs.size()) {
            return bsConfs.get(bkIndex);
        }
        return null;
    }

    /**
     * Kill a bookie by its socket address. Also, stops the autorecovery process
     * for the corresponding bookie server, if isAutoRecoveryEnabled is true.
     *
     * @param addr
     *            Socket Address
     * @return the configuration of killed bookie, or null if no bookie matched
     * @throws InterruptedException
     */
    public ServerConfiguration killBookie(BookieSocketAddress addr) throws Exception {
        BookieServer toRemove = null;
        int toRemoveIndex = 0;
        for (BookieServer server : bs) {
            if (server.getLocalAddress().equals(addr)) {
                server.shutdown();
                toRemove = server;
                break;
            }
            ++toRemoveIndex;
        }
        if (toRemove != null) {
            stopAutoRecoveryService(toRemove);
            bs.remove(toRemove);
            return bsConfs.remove(toRemoveIndex);
        }
        return null;
    }

    /**
     * Kill a bookie by index. Also, stops the respective auto recovery process
     * for this bookie, if isAutoRecoveryEnabled is true.
     *
     * @param index
     *            Bookie Index
     * @return the configuration of killed bookie
     * @throws InterruptedException
     * @throws IOException
     */
    public ServerConfiguration killBookie(int index) throws Exception {
        if (index >= bs.size()) {
            throw new IOException("Bookie does not exist");
        }
        BookieServer server = bs.get(index);
        server.shutdown();
        stopAutoRecoveryService(server);
        bs.remove(server);
        return bsConfs.remove(index);
    }

    /**
     * Sleep a bookie
     *
     * @param addr
     *            Socket Address
     * @param seconds
     *            Sleep seconds
     * @return Count Down latch which will be counted down just after sleep begins
     * @throws InterruptedException
     * @throws IOException
     */
    public CountDownLatch sleepBookie(BookieSocketAddress addr, final int seconds)
            throws Exception {
        for (final BookieServer bookie : bs) {
            if (bookie.getLocalAddress().equals(addr)) {
                final CountDownLatch l = new CountDownLatch(1);
                Thread sleeper = new Thread() {
                    @Override
                    public void run() {
                        try {
                            bookie.suspendProcessing();
                            LOG.info("bookie {} is asleep", bookie.getLocalAddress());
                            // latch is released only after processing is
                            // actually suspended, so callers can await it
                            l.countDown();
                            Thread.sleep(seconds * 1000);
                            bookie.resumeProcessing();
                            LOG.info("bookie {} is awake", bookie.getLocalAddress());
                        } catch (Exception e) {
                            LOG.error("Error suspending bookie", e);
                        }
                    }
                };
                sleeper.start();
                return l;
            }
        }
        throw new IOException("Bookie not found");
    }

    /**
     * Sleep a bookie until I count down the latch
     *
     * @param addr
     *            Socket Address
     * @param l
     *            Latch to wait on
     * @throws InterruptedException
     * @throws IOException
     */
    public void sleepBookie(BookieSocketAddress addr, final CountDownLatch l)
            throws Exception {
        for (final BookieServer bookie : bs) {
            if (bookie.getLocalAddress().equals(addr)) {
                // suspend synchronously; the background thread only resumes
                // processing once the caller counts the latch down
                bookie.suspendProcessing();
                Thread sleeper = new Thread() {
                    @Override
                    public void run() {
                        try {
                            l.await();
                            bookie.resumeProcessing();
                        } catch (Exception e) {
                            LOG.error("Error suspending bookie", e);
                        }
                    }
                };
                sleeper.start();
                return;
            }
        }
        throw new IOException("Bookie not found");
    }

    /**
     * Restart bookie servers. Also restarts all the respective auto recovery
     * process, if isAutoRecoveryEnabled is true.
     *
     * @throws InterruptedException
     * @throws IOException
     * @throws KeeperException
     * @throws BookieException
     */
    public void restartBookies() throws Exception {
        restartBookies(null);
    }

    /**
     * Restart a bookie. Also restart the respective auto recovery process,
     * if isAutoRecoveryEnabled is true.
     *
     * @param addr
     * @throws InterruptedException
     * @throws IOException
     * @throws KeeperException
     * @throws BookieException
     */
    public void restartBookie(BookieSocketAddress addr) throws Exception {
        BookieServer toRemove = null;
        int toRemoveIndex = 0;
        for (BookieServer server : bs) {
            if (server.getLocalAddress().equals(addr)) {
                server.shutdown();
                toRemove = server;
                break;
            }
            ++toRemoveIndex;
        }
        if (toRemove != null) {
            stopAutoRecoveryService(toRemove);
            bs.remove(toRemove);
            ServerConfiguration newConfig = bsConfs.remove(toRemoveIndex);
            // NOTE(review): brief pause before rebinding the same port —
            // presumably to let the OS release it; confirm intent
            Thread.sleep(1000);
            bs.add(startBookie(newConfig));
            bsConfs.add(newConfig);
            return;
        }
        throw new IOException("Bookie not found");
    }

    /**
     * Restart bookie servers using new configuration settings. Also restart the
     * respective auto recovery process, if isAutoRecoveryEnabled is true.
     *
     * @param newConf
     *            New Configuration Settings
     * @throws InterruptedException
     * @throws IOException
     * @throws KeeperException
     * @throws BookieException
     */
    public void restartBookies(ServerConfiguration newConf) throws Exception {
        // shut down bookie server
        for (BookieServer server : bs) {
            server.shutdown();
            stopAutoRecoveryService(server);
        }
        bs.clear();
        // NOTE(review): brief pause before rebinding the same ports —
        // presumably to let the OS release them; confirm intent
        Thread.sleep(1000);
        // restart the bookies with their previous configurations, overlaid
        // with newConf when one was supplied
        for (ServerConfiguration conf : bsConfs) {
            if (null != newConf) {
                conf.loadConf(newConf);
            }
            bs.add(startBookie(conf));
        }
    }

    /**
     * Helper method to startup a new bookie server with the indicated port
     * number. Also, starts the auto recovery process, if the
     * isAutoRecoveryEnabled is set true.
     *
     * @return the port the new bookie is listening on
     * @throws IOException
     */
    public int startNewBookie() throws Exception {
        ServerConfiguration conf = newServerConfiguration();
        bsConfs.add(conf);
        bs.add(startBookie(conf));

        return conf.getBookiePort();
    }

    /**
     * Helper method to startup a bookie server using a configuration object.
     * Also, starts the auto recovery process if isAutoRecoveryEnabled is true.
     *
     * @param conf
     *            Server Configuration Object
     *
     */
    protected BookieServer startBookie(ServerConfiguration conf) throws Exception {
        BookieServer server = new BookieServer(conf);
        server.start();

        if (bkc == null) {
            bkc = new BookKeeperTestClient(baseClientConf);
        }

        int port = conf.getBookiePort();
        String host = InetAddress.getLocalHost().getHostAddress();
        if (conf.getUseHostNameAsBookieID()) {
            host = InetAddress.getLocalHost().getCanonicalHostName();
        }
        // Block until the bookie registers itself in ZooKeeper; read-only
        // bookies register under a separate "readonly" znode.
        while ((!conf.isForceReadOnlyBookie()
                    && (bkc.getZkHandle().exists(
                            "/ledgers/available/" + host + ":" + port, false) == null))
                || (conf.isForceReadOnlyBookie()
                    && ((bkc.getZkHandle().exists(
                            "/ledgers/available/readonly/" + host + ":" + port,
                            false) == null)))) {
            Thread.sleep(500);
        }

        bkc.readBookiesBlocking();
        LOG.info("New bookie on port " + port + " has been created.");

        try {
            startAutoRecovery(server, conf);
        } catch (CompatibilityException ce) {
            LOG.error("Exception while starting AutoRecovery!", ce);
        } catch (UnavailableException ue) {
            LOG.error("Exception while starting AutoRecovery!", ue);
        }
        return server;
    }

    /**
     * Start a bookie with the given bookie instance. Also, starts the auto
     * recovery for this bookie, if isAutoRecoveryEnabled is true.
     */
    protected BookieServer startBookie(ServerConfiguration conf, final Bookie b)
            throws Exception {
        BookieServer server = new BookieServer(conf) {
            @Override
            protected Bookie newBookie(ServerConfiguration conf) {
                return b;
            }
        };
        server.start();

        int port = conf.getBookiePort();
        String host = InetAddress.getLocalHost().getHostAddress();
        if (conf.getUseHostNameAsBookieID()) {
            host = InetAddress.getLocalHost().getCanonicalHostName();
        }
        // Block until the bookie registers itself in ZooKeeper.
        while (bkc.getZkHandle().exists(
                "/ledgers/available/" + host + ":" + port, false) == null) {
            Thread.sleep(500);
        }

        bkc.readBookiesBlocking();
        LOG.info("New bookie on port " + port + " has been created.");

        try {
            startAutoRecovery(server, conf);
        } catch (CompatibilityException ce) {
            LOG.error("Exception while starting AutoRecovery!", ce);
        } catch (UnavailableException ue) {
            LOG.error("Exception while starting AutoRecovery!", ue);
        }
        return server;
    }

    public void setMetastoreImplClass(AbstractConfiguration conf) {
        conf.setMetastoreImplClass(InMemoryMetaStore.class.getName());
    }

    /**
     * Flags used to enable/disable the auto recovery process. If it is enabled,
     * starting the bookie server will starts the auto recovery process for that
     * bookie. Also, stopping bookie will stops the respective auto recovery
     * process.
     *
     * @param isAutoRecoveryEnabled
     *            Value true will enable the auto recovery process. Value false
     *            will disable the auto recovery process
     */
    public void setAutoRecoveryEnabled(boolean isAutoRecoveryEnabled) {
        this.isAutoRecoveryEnabled = isAutoRecoveryEnabled;
    }

    /**
     * Flag used to check whether auto recovery process is enabled/disabled. By
     * default the flag is false.
     *
     * @return true, if the auto recovery is enabled. Otherwise return false.
     */
    public boolean isAutoRecoveryEnabled() {
        return isAutoRecoveryEnabled;
    }

    private void startAutoRecovery(BookieServer bserver, ServerConfiguration conf)
            throws Exception {
        if (isAutoRecoveryEnabled()) {
            AutoRecoveryMain autoRecoveryProcess = new AutoRecoveryMain(conf);
            autoRecoveryProcess.start();
            autoRecoveryProcesses.put(bserver, autoRecoveryProcess);
            LOG.debug("Starting Auditor Recovery for the bookie:"
                    + bserver.getLocalAddress());
        }
    }

    private void stopAutoRecoveryService(BookieServer toRemove) throws Exception {
        AutoRecoveryMain autoRecoveryMain = autoRecoveryProcesses.remove(toRemove);
        if (null != autoRecoveryMain && isAutoRecoveryEnabled()) {
            autoRecoveryMain.shutdown();
            LOG.debug("Shutdown auto recovery for bookieserver:"
                    + toRemove.getLocalAddress());
        }
    }

    /**
     * Will starts the auto recovery process for the bookie servers. One auto
     * recovery process per each bookie server, if isAutoRecoveryEnabled is
     * enabled.
     */
    public void startReplicationService() throws Exception {
        int index = -1;
        for (BookieServer bserver : bs) {
            startAutoRecovery(bserver, bsConfs.get(++index));
        }
    }

    /**
     * Will stops all the auto recovery processes for the bookie cluster, if
     * isAutoRecoveryEnabled is true.
     */
    public void stopReplicationService() throws Exception {
        if (false == isAutoRecoveryEnabled()) {
            return;
        }
        for (Entry<BookieServer, AutoRecoveryMain> autoRecoveryProcess
                : autoRecoveryProcesses.entrySet()) {
            autoRecoveryProcess.getValue().shutdown();
            LOG.debug("Shutdown Auditor Recovery for the bookie:"
                    + autoRecoveryProcess.getKey().getLocalAddress());
        }
    }

    /**
     * Polls the auto-recovery processes until one of them has been elected
     * auditor, or the timeout elapses.
     *
     * @throws Exception if no auditor is found within the timeout
     */
    public Auditor getAuditor(int timeout, TimeUnit unit) throws Exception {
        final long timeoutAt = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeout, unit);
        while (System.nanoTime() < timeoutAt) {
            for (AutoRecoveryMain p : autoRecoveryProcesses.values()) {
                Auditor a = p.getAuditor();
                if (a != null) {
                    return a;
                }
            }
            Thread.sleep(100);
        }
        throw new Exception("No auditor found");
    }

    /**
     * Check whether the InetSocketAddress was created using a hostname or an IP
     * address. Represent as 'hostname/IPaddress' if the InetSocketAddress was
     * created using hostname. Represent as '/IPaddress' if the
     * InetSocketAddress was created using an IPaddress
     *
     * @param addr
     *            inetaddress
     * @return true if the address was created using an IP address, false if the
     *         address was created using a hostname
     */
    public static boolean isCreatedFromIp(BookieSocketAddress addr) {
        return addr.getSocketAddress().toString().startsWith("/");
    }
}
package net;

import java.util.Enumeration;
import java.util.Hashtable;

import common.net.Message;

import peersim.core.Node;

/**
 * Contains the messages used in the bandwidth management protocol and
 * the error codes for data transfer.
 *
 * A NetworkMessage carries the sender/receiver peersim nodes, a re-send
 * counter, and two per-timestamp connection tables: connection open
 * requests ({@link #SEND}) and connection terminations ({@link #SEND_FIN}).
 *
 * @author Alessandro Russo
 * @version $Revision: 0.02$
 */
public class NetworkMessage extends Message {

    /** Sender node. */
    protected final Node src_node;

    /** Receiver node. */
    protected final Node dst_node;

    /** Number of times this message has been re-sent after failures. */
    protected int re_send_counter;

    /** Message for updating upload, when pending upload is present. */
    protected final static byte UPD_UP = 10;

    /** Message to send request (connection open, SYN-like). */
    protected final static byte SEND = 40;

    /** Notification that a send request was refused. */
    protected final static byte SEND_REFUSED = 45;

    /**
     * Control-bandwidth failure notification. Related to errors of the
     * BWD_CONTROL option: it happens when a minimum bandwidth was requested
     * and there was not enough resource for a proper allocation.
     */
    protected final static byte BWD_FAILED = 47;

    /**
     * Requester not found. It might be useful when a flow is cancelled for
     * any reason.
     */
    protected final static byte REQ_NOTFOUND = 48;

    /** No resources available for the request. */
    protected final static byte NO_RESOURCES = 49;

    /** Generic success code. */
    protected final static byte OK = 50;

    /** Generic error code. */
    protected final static byte ERROR = 51;

    /** Connection-end notification (FIN-like). */
    protected final static byte SEND_FIN = 100;

    // Per-timestamp connection tables, keyed by connection id. They group
    // events that happen at this message's timestamp so they can be treated
    // in the correct order.
    private Hashtable<Long, Connection> send_requests;
    private Hashtable<Long, Connection> end_sends;

    // Marks this message as a control message (see setControl/isControl).
    private boolean control;

    /**
     * Builds a network message between two peersim nodes.
     *
     * The header fields inherited from {@link Message} (content length,
     * delays, payload) are initialized to zero/null; for changing header
     * values, please check the {@code Message.*} constants.
     *
     * @param ts          message timestamp
     * @param source      source (peersim) node
     * @param destination destination (peersim) node
     */
    public NetworkMessage(long ts, Node source, Node destination) {
        super(source.getID(), destination.getID(), 0, 0L, 0L, null, ts);
        this.src_node = source;
        this.dst_node = destination;
        this.re_send_counter = 0;
        this.send_requests = new Hashtable<Long, Connection>();
        this.end_sends = new Hashtable<Long, Connection>();
        this.control = false;
    }

    /**
     * Clone: useful to recreate network messages.
     *
     * IMPORTANT TIP: it is essential to recreate a new network message
     * before re-sending it through peersim. Note that this deliberately
     * builds a fresh message (empty connection tables, zero re-send
     * counter) rather than delegating to {@code super.clone()}.
     */
    @Override
    public Object clone() {
        return new NetworkMessage(this.timestamp, this.src_node, this.dst_node);
    }

    /**
     * Get the sender node.
     * @return Sender node.
     */
    public Node getSrc() {
        return this.src_node;
    }

    /**
     * Get the receiver node.
     * @return Receiver node.
     */
    public Node getDst() {
        return this.dst_node;
    }

    /** @return how many times this message has been re-sent. */
    public int getResendCounter() {
        return this.re_send_counter;
    }

    /** Sets the re-send counter. */
    public void setResendCounter(int counter) {
        this.re_send_counter = counter;
    }

    /**
     * Check if the given object is the same network message or not. Two
     * messages are equal when they reference the same sender node (identity
     * comparison is intentional: peersim nodes are shared instances) and
     * carry the same timestamp.
     *
     * @param o object.
     * @return True if they are the same, false otherwise.
     */
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof NetworkMessage)) {
            return false;
        }
        NetworkMessage msg = (NetworkMessage) o;
        return msg.getSrc() == this.src_node && msg.getTimeStamp() == this.timestamp;
    }

    /**
     * Hash code consistent with {@link #equals(Object)}: based on the sender
     * node's identity and the timestamp.
     */
    @Override
    public int hashCode() {
        return 31 * System.identityHashCode(this.src_node)
                + (int) (this.timestamp ^ (this.timestamp >>> 32));
    }

    /**
     * Printable version of bandwidth message.
     * @return String containing labels and values of current bandwidth message.
     */
    @Override
    public String toString() {
        return "Sender " + this.src_node.getID() + " | Receiver " + this.dst_node.getID()
                + " | Timestamp " + this.timestamp + ".";
    }

    /**
     * Removes the connection associated with the given event type.
     *
     * @param event either {@link #SEND} (open request) or {@link #SEND_FIN}
     *              (connection end)
     * @param c     the connection to remove
     * @return the removed connection, or null if the event type is unknown or
     *         the connection was not present
     */
    public Connection removeConnection(byte event, Connection c) {
        if (event == NetworkMessage.SEND) {
            // send request (SYN)
            return this.removeSendRequest(c);
        } else if (event == NetworkMessage.SEND_FIN) {
            // accepted and on-going connection end
            return this.removeEndSend(c);
        }
        return null;
    }

    /**
     * Removes the connection with the given id from whichever table holds it,
     * checking the end-send table first.
     *
     * @param cid connection identifier
     * @return the removed connection, or null if not found
     */
    public Connection removeConnection(long cid) {
        Connection removed = this.end_sends.remove(Long.valueOf(cid));
        if (removed != null) {
            return removed;
        }
        return this.send_requests.remove(Long.valueOf(cid));
    }

    /**
     * Puts a connection into this message. It is about an event that happens
     * at the timestamp of this message (a way of gathering and treating
     * messages in the correct order).
     *
     * By now, it is able to handle {@link #SEND} and {@link #SEND_FIN}
     * events.
     *
     * @param event type of event (check valid types above)
     * @param c     connection concerned by this event
     * @return true if the connection was successfully added (i.e. no
     *         previous mapping existed for its id)
     */
    public boolean putConnection(byte event, Connection c) {
        if (event == NetworkMessage.SEND) {
            // send request, SYN
            if (this.putSendRequest(c) == null) {
                return true;
            }
        } else if (event == NetworkMessage.SEND_FIN) {
            // connection accepted and on-going; connection end, FIN
            if (this.putEndSend(c) == null) {
                return true;
            }
        }
        return false;
    }

    /**
     * Enumerates the connections registered for the given event type, or null
     * for an unknown event.
     */
    public Enumeration<Connection> getConnections(byte event) {
        if (event == NetworkMessage.SEND) {
            // pending connection-open requests
            return this.send_requests.elements();
        } else if (event == NetworkMessage.SEND_FIN) {
            // connection-end events
            return this.end_sends.elements();
        }
        return null;
    }

    /** Removes a pending send request, keyed by its connection id. */
    public Connection removeSendRequest(Connection c) {
        return this.send_requests.remove(Long.valueOf(c.getId()));
    }

    /** Removes a connection-end entry, keyed by its connection id. */
    public Connection removeEndSend(Connection c) {
        return this.end_sends.remove(Long.valueOf(c.getId()));
    }

    /** Registers a pending send request; returns the previous mapping, if any. */
    public Connection putSendRequest(Connection c) {
        return this.send_requests.put(Long.valueOf(c.getId()), c);
    }

    /** Registers a connection-end entry; returns the previous mapping, if any. */
    public Connection putEndSend(Connection c) {
        return this.end_sends.put(Long.valueOf(c.getId()), c);
    }

    /** @return all pending send requests. */
    public Enumeration<Connection> getSendRequests() {
        return this.send_requests.elements();
    }

    /** @return all connection-end entries. */
    public Enumeration<Connection> getEndSends() {
        return this.end_sends.elements();
    }

    /** Marks this message as a control message (one-way: cannot be unset). */
    public void setControl() {
        this.control = true;
    }

    /** @return true if this is a control message. */
    public boolean isControl() {
        return this.control;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.lib.server; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.lib.lang.XException; import org.apache.hadoop.test.HTestCase; import org.apache.hadoop.test.TestDir; import org.apache.hadoop.test.TestDirHelper; import org.apache.hadoop.test.TestException; import org.apache.hadoop.util.StringUtils; import org.junit.Test; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.InputStream; import java.io.OutputStream; import java.io.Writer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public class TestServer extends HTestCase { @Test @TestDir public void constructorsGetters() throws Exception { Server server = new Server("server", "/a", "/b", "/c", "/d", new Configuration(false)); assertEquals(server.getHomeDir(), "/a"); assertEquals(server.getConfigDir(), "/b"); assertEquals(server.getLogDir(), 
"/c"); assertEquals(server.getTempDir(), "/d"); assertEquals(server.getName(), "server"); assertEquals(server.getPrefix(), "server"); assertEquals(server.getPrefixedName("name"), "server.name"); assertNotNull(server.getConfig()); server = new Server("server", "/a", "/b", "/c", "/d"); assertEquals(server.getHomeDir(), "/a"); assertEquals(server.getConfigDir(), "/b"); assertEquals(server.getLogDir(), "/c"); assertEquals(server.getTempDir(), "/d"); assertEquals(server.getName(), "server"); assertEquals(server.getPrefix(), "server"); assertEquals(server.getPrefixedName("name"), "server.name"); assertNull(server.getConfig()); server = new Server("server", TestDirHelper.getTestDir().getAbsolutePath(), new Configuration(false)); assertEquals(server.getHomeDir(), TestDirHelper.getTestDir().getAbsolutePath()); assertEquals(server.getConfigDir(), TestDirHelper.getTestDir() + "/conf"); assertEquals(server.getLogDir(), TestDirHelper.getTestDir() + "/log"); assertEquals(server.getTempDir(), TestDirHelper.getTestDir() + "/temp"); assertEquals(server.getName(), "server"); assertEquals(server.getPrefix(), "server"); assertEquals(server.getPrefixedName("name"), "server.name"); assertNotNull(server.getConfig()); server = new Server("server", TestDirHelper.getTestDir().getAbsolutePath()); assertEquals(server.getHomeDir(), TestDirHelper.getTestDir().getAbsolutePath()); assertEquals(server.getConfigDir(), TestDirHelper.getTestDir() + "/conf"); assertEquals(server.getLogDir(), TestDirHelper.getTestDir() + "/log"); assertEquals(server.getTempDir(), TestDirHelper.getTestDir() + "/temp"); assertEquals(server.getName(), "server"); assertEquals(server.getPrefix(), "server"); assertEquals(server.getPrefixedName("name"), "server.name"); assertNull(server.getConfig()); } @Test @TestException(exception = ServerException.class, msgRegExp = "S01.*") @TestDir public void initNoHomeDir() throws Exception { File homeDir = new File(TestDirHelper.getTestDir(), "home"); Configuration conf = new 
Configuration(false); conf.set("server.services", TestService.class.getName()); Server server = new Server("server", homeDir.getAbsolutePath(), conf); server.init(); } @Test @TestException(exception = ServerException.class, msgRegExp = "S02.*") @TestDir public void initHomeDirNotDir() throws Exception { File homeDir = new File(TestDirHelper.getTestDir(), "home"); new FileOutputStream(homeDir).close(); Configuration conf = new Configuration(false); conf.set("server.services", TestService.class.getName()); Server server = new Server("server", homeDir.getAbsolutePath(), conf); server.init(); } @Test @TestException(exception = ServerException.class, msgRegExp = "S01.*") @TestDir public void initNoConfigDir() throws Exception { File homeDir = new File(TestDirHelper.getTestDir(), "home"); assertTrue(homeDir.mkdir()); assertTrue(new File(homeDir, "log").mkdir()); assertTrue(new File(homeDir, "temp").mkdir()); Configuration conf = new Configuration(false); conf.set("server.services", TestService.class.getName()); Server server = new Server("server", homeDir.getAbsolutePath(), conf); server.init(); } @Test @TestException(exception = ServerException.class, msgRegExp = "S02.*") @TestDir public void initConfigDirNotDir() throws Exception { File homeDir = new File(TestDirHelper.getTestDir(), "home"); assertTrue(homeDir.mkdir()); assertTrue(new File(homeDir, "log").mkdir()); assertTrue(new File(homeDir, "temp").mkdir()); File configDir = new File(homeDir, "conf"); new FileOutputStream(configDir).close(); Configuration conf = new Configuration(false); conf.set("server.services", TestService.class.getName()); Server server = new Server("server", homeDir.getAbsolutePath(), conf); server.init(); } @Test @TestException(exception = ServerException.class, msgRegExp = "S01.*") @TestDir public void initNoLogDir() throws Exception { File homeDir = new File(TestDirHelper.getTestDir(), "home"); assertTrue(homeDir.mkdir()); assertTrue(new File(homeDir, "conf").mkdir()); assertTrue(new 
File(homeDir, "temp").mkdir()); Configuration conf = new Configuration(false); conf.set("server.services", TestService.class.getName()); Server server = new Server("server", homeDir.getAbsolutePath(), conf); server.init(); } @Test @TestException(exception = ServerException.class, msgRegExp = "S02.*") @TestDir public void initLogDirNotDir() throws Exception { File homeDir = new File(TestDirHelper.getTestDir(), "home"); assertTrue(homeDir.mkdir()); assertTrue(new File(homeDir, "conf").mkdir()); assertTrue(new File(homeDir, "temp").mkdir()); File logDir = new File(homeDir, "log"); new FileOutputStream(logDir).close(); Configuration conf = new Configuration(false); conf.set("server.services", TestService.class.getName()); Server server = new Server("server", homeDir.getAbsolutePath(), conf); server.init(); } @Test @TestException(exception = ServerException.class, msgRegExp = "S01.*") @TestDir public void initNoTempDir() throws Exception { File homeDir = new File(TestDirHelper.getTestDir(), "home"); assertTrue(homeDir.mkdir()); assertTrue(new File(homeDir, "conf").mkdir()); assertTrue(new File(homeDir, "log").mkdir()); Configuration conf = new Configuration(false); conf.set("server.services", TestService.class.getName()); Server server = new Server("server", homeDir.getAbsolutePath(), conf); server.init(); } @Test @TestException(exception = ServerException.class, msgRegExp = "S02.*") @TestDir public void initTempDirNotDir() throws Exception { File homeDir = new File(TestDirHelper.getTestDir(), "home"); assertTrue(homeDir.mkdir()); assertTrue(new File(homeDir, "conf").mkdir()); assertTrue(new File(homeDir, "log").mkdir()); File tempDir = new File(homeDir, "temp"); new FileOutputStream(tempDir).close(); Configuration conf = new Configuration(false); conf.set("server.services", TestService.class.getName()); Server server = new Server("server", homeDir.getAbsolutePath(), conf); server.init(); } @Test @TestException(exception = ServerException.class, msgRegExp = "S05.*") 
@TestDir public void siteFileNotAFile() throws Exception { String homeDir = TestDirHelper.getTestDir().getAbsolutePath(); File siteFile = new File(homeDir, "server-site.xml"); assertTrue(siteFile.mkdir()); Server server = new Server("server", homeDir, homeDir, homeDir, homeDir); server.init(); } private Server createServer(Configuration conf) { return new Server("server", TestDirHelper.getTestDir().getAbsolutePath(), TestDirHelper.getTestDir().getAbsolutePath(), TestDirHelper.getTestDir().getAbsolutePath(), TestDirHelper.getTestDir().getAbsolutePath(), conf); } @Test @TestDir public void log4jFile() throws Exception { InputStream is = Server.getResource("default-log4j.properties"); OutputStream os = new FileOutputStream( new File(TestDirHelper.getTestDir(), "server-log4j.properties")); IOUtils.copyBytes(is, os, 1024, true); Configuration conf = new Configuration(false); Server server = createServer(conf); server.init(); } public static class LifeCycleService extends BaseService { public LifeCycleService() { super("lifecycle"); } @Override protected void init() throws ServiceException { assertEquals(getServer().getStatus(), Server.Status.BOOTING); } @Override public void destroy() { assertEquals(getServer().getStatus(), Server.Status.SHUTTING_DOWN); super.destroy(); } @Override public Class getInterface() { return LifeCycleService.class; } } @Test @TestDir public void lifeCycle() throws Exception { Configuration conf = new Configuration(false); conf.set("server.services", LifeCycleService.class.getName()); Server server = createServer(conf); assertEquals(server.getStatus(), Server.Status.UNDEF); server.init(); assertNotNull(server.get(LifeCycleService.class)); assertEquals(server.getStatus(), Server.Status.NORMAL); server.destroy(); assertEquals(server.getStatus(), Server.Status.SHUTDOWN); } @Test @TestDir public void startWithStatusNotNormal() throws Exception { Configuration conf = new Configuration(false); conf.set("server.startup.status", "ADMIN"); Server server 
= createServer(conf); server.init(); assertEquals(server.getStatus(), Server.Status.ADMIN); server.destroy(); } @Test(expected = IllegalArgumentException.class) @TestDir public void nonSeteableStatus() throws Exception { Configuration conf = new Configuration(false); Server server = createServer(conf); server.init(); server.setStatus(Server.Status.SHUTDOWN); } public static class TestService implements Service { static List<String> LIFECYCLE = new ArrayList<>(); @Override public void init(Server server) throws ServiceException { LIFECYCLE.add("init"); } @Override public void postInit() throws ServiceException { LIFECYCLE.add("postInit"); } @Override public void destroy() { LIFECYCLE.add("destroy"); } @Override public Class[] getServiceDependencies() { return new Class[0]; } @Override public Class getInterface() { return TestService.class; } @Override public void serverStatusChange(Server.Status oldStatus, Server.Status newStatus) throws ServiceException { LIFECYCLE.add("serverStatusChange"); } } public static class TestServiceExceptionOnStatusChange extends TestService { @Override public void serverStatusChange(Server.Status oldStatus, Server.Status newStatus) throws ServiceException { throw new RuntimeException(); } } @Test @TestDir public void changeStatus() throws Exception { TestService.LIFECYCLE.clear(); Configuration conf = new Configuration(false); conf.set("server.services", TestService.class.getName()); Server server = createServer(conf); server.init(); server.setStatus(Server.Status.ADMIN); assertTrue(TestService.LIFECYCLE.contains("serverStatusChange")); } @Test @TestException(exception = ServerException.class, msgRegExp = "S11.*") @TestDir public void changeStatusServiceException() throws Exception { TestService.LIFECYCLE.clear(); Configuration conf = new Configuration(false); conf.set("server.services", TestServiceExceptionOnStatusChange.class.getName()); Server server = createServer(conf); server.init(); } @Test @TestDir public void setSameStatus() 
throws Exception { Configuration conf = new Configuration(false); conf.set("server.services", TestService.class.getName()); Server server = createServer(conf); server.init(); TestService.LIFECYCLE.clear(); server.setStatus(server.getStatus()); assertFalse(TestService.LIFECYCLE.contains("serverStatusChange")); } @Test @TestDir public void serviceLifeCycle() throws Exception { TestService.LIFECYCLE.clear(); Configuration conf = new Configuration(false); conf.set("server.services", TestService.class.getName()); Server server = createServer(conf); server.init(); assertNotNull(server.get(TestService.class)); server.destroy(); assertEquals(TestService.LIFECYCLE, Arrays.asList("init", "postInit", "serverStatusChange", "destroy")); } @Test @TestDir public void loadingDefaultConfig() throws Exception { String dir = TestDirHelper.getTestDir().getAbsolutePath(); Server server = new Server("testserver", dir, dir, dir, dir); server.init(); assertEquals(server.getConfig().get("testserver.a"), "default"); } @Test @TestDir public void loadingSiteConfig() throws Exception { String dir = TestDirHelper.getTestDir().getAbsolutePath(); File configFile = new File(dir, "testserver-site.xml"); Writer w = new FileWriter(configFile); w.write( "<configuration><property><name>testserver.a</name><value>site</value></property></configuration>"); w.close(); Server server = new Server("testserver", dir, dir, dir, dir); server.init(); assertEquals(server.getConfig().get("testserver.a"), "site"); } @Test @TestDir public void loadingSysPropConfig() throws Exception { try { System.setProperty("testserver.a", "sysprop"); String dir = TestDirHelper.getTestDir().getAbsolutePath(); File configFile = new File(dir, "testserver-site.xml"); Writer w = new FileWriter(configFile); w.write( "<configuration><property><name>testserver.a</name><value>site</value></property></configuration>"); w.close(); Server server = new Server("testserver", dir, dir, dir, dir); server.init(); 
assertEquals(server.getConfig().get("testserver.a"), "sysprop"); } finally { System.getProperties().remove("testserver.a"); } } @Test(expected = IllegalStateException.class) @TestDir public void illegalState1() throws Exception { Server server = new Server("server", TestDirHelper.getTestDir().getAbsolutePath(), new Configuration(false)); server.destroy(); } @Test(expected = IllegalStateException.class) @TestDir public void illegalState2() throws Exception { Server server = new Server("server", TestDirHelper.getTestDir().getAbsolutePath(), new Configuration(false)); server.get(Object.class); } @Test(expected = IllegalStateException.class) @TestDir public void illegalState3() throws Exception { Server server = new Server("server", TestDirHelper.getTestDir().getAbsolutePath(), new Configuration(false)); server.setService(null); } @Test(expected = IllegalStateException.class) @TestDir public void illegalState4() throws Exception { String dir = TestDirHelper.getTestDir().getAbsolutePath(); Server server = new Server("server", dir, dir, dir, dir, new Configuration(false)); server.init(); server.init(); } private static List<String> ORDER = new ArrayList<>(); public abstract static class MyService implements Service, XException.ERROR { private String id; private Class serviceInterface; private Class[] dependencies; private boolean failOnInit; private boolean failOnDestroy; protected MyService(String id, Class serviceInterface, Class[] dependencies, boolean failOnInit, boolean failOnDestroy) { this.id = id; this.serviceInterface = serviceInterface; this.dependencies = dependencies; this.failOnInit = failOnInit; this.failOnDestroy = failOnDestroy; } @Override public void init(Server server) throws ServiceException { ORDER.add(id + ".init"); if (failOnInit) { throw new ServiceException(this); } } @Override public void postInit() throws ServiceException { ORDER.add(id + ".postInit"); } @Override public String getTemplate() { return ""; } @Override public void destroy() { 
ORDER.add(id + ".destroy"); if (failOnDestroy) { throw new RuntimeException(); } } @Override public Class[] getServiceDependencies() { return dependencies; } @Override public Class getInterface() { return serviceInterface; } @Override public void serverStatusChange(Server.Status oldStatus, Server.Status newStatus) throws ServiceException { } } public static class MyService1 extends MyService { public MyService1() { super("s1", MyService1.class, null, false, false); } protected MyService1(String id, Class serviceInterface, Class[] dependencies, boolean failOnInit, boolean failOnDestroy) { super(id, serviceInterface, dependencies, failOnInit, failOnDestroy); } } public static class MyService2 extends MyService { public MyService2() { super("s2", MyService2.class, null, true, false); } } public static class MyService3 extends MyService { public MyService3() { super("s3", MyService3.class, null, false, false); } } public static class MyService1a extends MyService1 { public MyService1a() { super("s1a", MyService1.class, null, false, false); } } public static class MyService4 extends MyService1 { public MyService4() { super("s4a", String.class, null, false, false); } } public static class MyService5 extends MyService { public MyService5() { super("s5", MyService5.class, null, false, true); } protected MyService5(String id, Class serviceInterface, Class[] dependencies, boolean failOnInit, boolean failOnDestroy) { super(id, serviceInterface, dependencies, failOnInit, failOnDestroy); } } public static class MyService5a extends MyService5 { public MyService5a() { super("s5a", MyService5.class, null, false, false); } } public static class MyService6 extends MyService { public MyService6() { super("s6", MyService6.class, new Class[]{MyService1.class}, false, false); } } public static class MyService7 extends MyService { @SuppressWarnings({"UnusedParameters"}) public MyService7(String foo) { super("s6", MyService7.class, new Class[]{MyService1.class}, false, false); } } @Test 
@TestException(exception = ServerException.class, msgRegExp = "S08.*") @TestDir public void invalidSservice() throws Exception { String dir = TestDirHelper.getTestDir().getAbsolutePath(); Configuration conf = new Configuration(false); conf.set("server.services", "foo"); Server server = new Server("server", dir, dir, dir, dir, conf); server.init(); } @Test @TestException(exception = ServerException.class, msgRegExp = "S07.*") @TestDir public void serviceWithNoDefaultConstructor() throws Exception { String dir = TestDirHelper.getTestDir().getAbsolutePath(); Configuration conf = new Configuration(false); conf.set("server.services", MyService7.class.getName()); Server server = new Server("server", dir, dir, dir, dir, conf); server.init(); } @Test @TestException(exception = ServerException.class, msgRegExp = "S04.*") @TestDir public void serviceNotImplementingServiceInterface() throws Exception { String dir = TestDirHelper.getTestDir().getAbsolutePath(); Configuration conf = new Configuration(false); conf.set("server.services", MyService4.class.getName()); Server server = new Server("server", dir, dir, dir, dir, conf); server.init(); } @Test @TestException(exception = ServerException.class, msgRegExp = "S10.*") @TestDir public void serviceWithMissingDependency() throws Exception { String dir = TestDirHelper.getTestDir().getAbsolutePath(); Configuration conf = new Configuration(false); String services = StringUtils.join(",", Arrays.asList(MyService3.class.getName(), MyService6.class.getName())); conf.set("server.services", services); Server server = new Server("server", dir, dir, dir, dir, conf); server.init(); } @Test @TestDir public void services() throws Exception { String dir = TestDirHelper.getTestDir().getAbsolutePath(); Configuration conf; Server server; // no services ORDER.clear(); conf = new Configuration(false); server = new Server("server", dir, dir, dir, dir, conf); server.init(); assertEquals(ORDER.size(), 0); // 2 services init/destroy ORDER.clear(); 
String services = StringUtils.join(",", Arrays.asList(MyService1.class.getName(), MyService3.class.getName())); conf = new Configuration(false); conf.set("server.services", services); server = new Server("server", dir, dir, dir, dir, conf); server.init(); assertEquals(server.get(MyService1.class).getInterface(), MyService1.class); assertEquals(server.get(MyService3.class).getInterface(), MyService3.class); assertEquals(ORDER.size(), 4); assertEquals(ORDER.get(0), "s1.init"); assertEquals(ORDER.get(1), "s3.init"); assertEquals(ORDER.get(2), "s1.postInit"); assertEquals(ORDER.get(3), "s3.postInit"); server.destroy(); assertEquals(ORDER.size(), 6); assertEquals(ORDER.get(4), "s3.destroy"); assertEquals(ORDER.get(5), "s1.destroy"); // 3 services, 2nd one fails on init ORDER.clear(); services = StringUtils.join(",", Arrays .asList(MyService1.class.getName(), MyService2.class.getName(), MyService3.class.getName())); conf = new Configuration(false); conf.set("server.services", services); server = new Server("server", dir, dir, dir, dir, conf); try { server.init(); fail(); } catch (ServerException ex) { assertEquals(MyService2.class, ex.getError().getClass()); } catch (Exception ex) { fail(); } assertEquals(ORDER.size(), 3); assertEquals(ORDER.get(0), "s1.init"); assertEquals(ORDER.get(1), "s2.init"); assertEquals(ORDER.get(2), "s1.destroy"); // 2 services one fails on destroy ORDER.clear(); services = StringUtils.join(",", Arrays.asList(MyService1.class.getName(), MyService5.class.getName())); conf = new Configuration(false); conf.set("server.services", services); server = new Server("server", dir, dir, dir, dir, conf); server.init(); assertEquals(ORDER.size(), 4); assertEquals(ORDER.get(0), "s1.init"); assertEquals(ORDER.get(1), "s5.init"); assertEquals(ORDER.get(2), "s1.postInit"); assertEquals(ORDER.get(3), "s5.postInit"); server.destroy(); assertEquals(ORDER.size(), 6); assertEquals(ORDER.get(4), "s5.destroy"); assertEquals(ORDER.get(5), "s1.destroy"); // service 
override via ext ORDER.clear(); services = StringUtils.join(",", Arrays.asList(MyService1.class.getName(), MyService3.class.getName())); String servicesExt = StringUtils.join(",", Arrays.asList(MyService1a.class.getName())); conf = new Configuration(false); conf.set("server.services", services); conf.set("server.services.ext", servicesExt); server = new Server("server", dir, dir, dir, dir, conf); server.init(); assertEquals(server.get(MyService1.class).getClass(), MyService1a.class); assertEquals(ORDER.size(), 4); assertEquals(ORDER.get(0), "s1a.init"); assertEquals(ORDER.get(1), "s3.init"); assertEquals(ORDER.get(2), "s1a.postInit"); assertEquals(ORDER.get(3), "s3.postInit"); server.destroy(); assertEquals(ORDER.size(), 6); assertEquals(ORDER.get(4), "s3.destroy"); assertEquals(ORDER.get(5), "s1a.destroy"); // service override via setService ORDER.clear(); services = StringUtils.join(",", Arrays.asList(MyService1.class.getName(), MyService3.class.getName())); conf = new Configuration(false); conf.set("server.services", services); server = new Server("server", dir, dir, dir, dir, conf); server.init(); server.setService(MyService1a.class); assertEquals(ORDER.size(), 6); assertEquals(ORDER.get(4), "s1.destroy"); assertEquals(ORDER.get(5), "s1a.init"); assertEquals(server.get(MyService1.class).getClass(), MyService1a.class); server.destroy(); assertEquals(ORDER.size(), 8); assertEquals(ORDER.get(6), "s3.destroy"); assertEquals(ORDER.get(7), "s1a.destroy"); // service add via setService ORDER.clear(); services = StringUtils.join(",", Arrays.asList(MyService1.class.getName(), MyService3.class.getName())); conf = new Configuration(false); conf.set("server.services", services); server = new Server("server", dir, dir, dir, dir, conf); server.init(); server.setService(MyService5.class); assertEquals(ORDER.size(), 5); assertEquals(ORDER.get(4), "s5.init"); assertEquals(server.get(MyService5.class).getClass(), MyService5.class); server.destroy(); assertEquals(ORDER.size(), 
8); assertEquals(ORDER.get(5), "s5.destroy"); assertEquals(ORDER.get(6), "s3.destroy"); assertEquals(ORDER.get(7), "s1.destroy"); // service add via setService exception ORDER.clear(); services = StringUtils.join(",", Arrays.asList(MyService1.class.getName(), MyService3.class.getName())); conf = new Configuration(false); conf.set("server.services", services); server = new Server("server", dir, dir, dir, dir, conf); server.init(); try { server.setService(MyService7.class); fail(); } catch (ServerException ex) { assertEquals(ServerException.ERROR.S09, ex.getError()); } catch (Exception ex) { fail(); } assertEquals(ORDER.size(), 6); assertEquals(ORDER.get(4), "s3.destroy"); assertEquals(ORDER.get(5), "s1.destroy"); // service with dependency ORDER.clear(); services = StringUtils.join(",", Arrays.asList(MyService1.class.getName(), MyService6.class.getName())); conf = new Configuration(false); conf.set("server.services", services); server = new Server("server", dir, dir, dir, dir, conf); server.init(); assertEquals(server.get(MyService1.class).getInterface(), MyService1.class); assertEquals(server.get(MyService6.class).getInterface(), MyService6.class); server.destroy(); } }
package nyla.solutions.core.util;

import nyla.solutions.core.data.clock.Day;
import nyla.solutions.core.exception.ConfigException;
import nyla.solutions.core.exception.ConfigLockException;
import nyla.solutions.core.patterns.observer.SubjectObserver;
import nyla.solutions.core.util.settings.ConfigSettings;
import nyla.solutions.core.util.settings.Settings;

import java.util.Map;
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;

/**
 * <pre>
 * This class provides a central mechanism for applications to access
 * key/value property settings and encrypted passwords.
 *
 * There are several ways to specify the
 * configuration property file location.
 * <ol><li>Add file config.properties to CLASSPATH.
 * This file will be loaded as a Java resource bundle.</li>
 * <li>Add the JVM argument -Dconfig.properties where the value is equal to
 * the location of configuration file.
 *
 * Example:
 * -Dconfig.properties=/dev/configurations/files/system/config.properties</li></ol>
 * There are methods to get the String value property such as <i>Config.getProperty(key)</i> method
 * or get an expected property value of a type such as Integer, Boolean, etc.
 *
 * JVM argument system properties can also be accessed by adding the
 * following to the configuration file;
 * <span style="color:blue">
 * nyla.solutions.core.util.Config.mergeSystemProperties=true
 * </span>
 *
 * Values in the System properties can be set with values for the configuration by using the following
 * solutions.global.util.Config.setSystemProperties=true
 *
 * It also supports formatting several property values into a single property
 * by adding the following property;
 *
 * <span style="color:blue">
 * nyla.solutions.core.util.Config.useFormatting=true
 *
 * <i>Example</i>
 * machineName=localhost
 * host=${machineName}.mycompany.com
 * </span>
 *
 * By default the configuration is read only once when the
 * application is initialized. Add the following to the
 * configuration property file to always reload the property whenever
 * a getProperty... method is called.
 *
 * <span style="color:blue">nyla.solutions.core.util.Config.alwaysReloadProperties=true</span>
 * </pre>
 *
 * @author Gregory Green
 */
public class Config
{
   /** Guards lazy creation and replacement of the shared {@link Settings} instance. */
   private final static ReentrantLock lock = new ReentrantLock();

   public static final String RESOURCE_BUNDLE_NAME = "config";

   /**
    * JVM system property naming the configuration property file location.
    */
   public static final String SYS_PROPERTY = "config.properties";

   public static final String DEFAULT_PROP_FILE_NAME = SYS_PROPERTY;

   /** Lazily created in {@link #getSettings()}; all access is guarded by {@link #lock}. */
   private static Settings settings = null;

   /** Maximum time to wait for {@link #lock} before failing with {@link ConfigLockException}. */
   private static final long lockPeriodMs = 3000;

   /**
    * Utility class of static accessors; not meant to be instantiated.
    */
   private Config()
   {
   }

   /**
    * Property may reference properties in example ${prop.name}+somethingElse
    *
    * @param property the property
    * @return the formatted value
    * @throws ConfigException when format exception occur
    */
   public static String interpret(String property)
   {
      return getSettings().interpret(property);
   }

   /**
    * @param alwaysReload boolean to determine whether properties should always be reloaded
    */
   public static void setAlwaysReload(boolean alwaysReload)
   {
      getSettings().setAlwaysReload(alwaysReload);
   }

   /**
    * Force the underlying settings to be re-read.
    */
   public static void reLoad()
   {
      getSettings().reLoad();
   }

   /**
    * @return the configuration location
    */
   public static String getLocation()
   {
      // NOTE(review): returns the temp dir, same as getTempDir() — looks like a
      // placeholder rather than the actual configuration location; confirm intent.
      return System.getProperty("java.io.tmpdir");
   }

   /**
    * @return System.getProperty("java.io.tmpdir")
    */
   public static String getTempDir()
   {
      return System.getProperty("java.io.tmpdir");
   }

   /**
    * Retrieves a configuration property as a String object.
    * Loads the file if not already initialized.
    *
    * @param key Key Name of the property to be returned.
    * @return Value of the property as a string or null if no property found.
    */
   public static String getProperty(String key)
   {
      return getSettings().getProperty(key);
   }

   /**
    * @param key the key of the property
    * @return the property value split into tokens
    */
   public static String[] getPropertyStrings(String key)
   {
      return getSettings().getPropertyStrings(key);
   }

   /**
    * Multiple properties separated by white spaces
    *
    * @param aClass the calling class
    * @param key the key
    * @return array of String
    */
   public static String[] getPropertyStrings(Class<?> aClass, String key)
   {
      return Text.split(getProperty(aClass, key));
   }

   /**
    * Multiple properties separated by white spaces
    *
    * @param aClass the calling class
    * @param key Name of the property to be returned.
    * @param aDefault the default value
    * @return the class-qualified property split into tokens
    */
   public static String[] getPropertyStrings(Class<?> aClass, String key, String aDefault)
   {
      return getSettings().getPropertyStrings(aClass, key, aDefault);
   }

   /**
    * Get the property
    *
    * @param aClass the class associated with the property
    * @param key the property key
    * @param resourceBundle the resource bundle default used if property not found
    * @return the property value
    */
   public static String getProperty(Class<?> aClass, String key, ResourceBundle resourceBundle)
   {
      return getSettings().getProperty(aClass, key, resourceBundle);
   }

   /**
    * Multiple properties separated by white spaces
    *
    * @param aClass the calling class
    * @param key Name of the property to be returned.
    * @param aDefault the default values
    * @return the class-qualified property split into tokens
    */
   public static String[] getPropertyStrings(Class<?> aClass, String key, String... aDefault)
   {
      return getSettings().getPropertyStrings(aClass, key, aDefault);
   }

   /**
    * Retrieves a configuration property as a String object.
    * Loads the file if not already initialized.
    *
    * @param aClass the calling class
    * @param key property key
    * @return Value of the property as a string or null if no property found.
    */
   public static String getProperty(Class<?> aClass, String key)
   {
      return getSettings().getProperty(aClass, key);
   }

   /**
    * Retrieves a configuration property as a String object.
    * Loads the file if not already initialized.
    *
    * @param aClass the class the name
    * @param key Name of the property to be returned.
    * @param aDefault the default value
    * @return Value of the property as a string or null if no property found.
    */
   public static String getProperty(Class<?> aClass, String key, String aDefault)
   {
      return getSettings().getProperty(aClass, key, aDefault);
   }

   /**
    * Look up a property under an environment-variable-friendly version of the key.
    *
    * @param key the raw property key
    * @return the property value stored under the sanitized key
    */
   public static String getPropertyEnv(String key)
   {
      String env = sanitizeEnvVarNAme(key);
      return getProperty(env);
   }

   /**
    * Look up a property under an environment-variable-friendly version of the key.
    *
    * @param key the raw property key
    * @param aDefault the default value
    * @return the property value stored under the sanitized key, or the default
    */
   public static String getPropertyEnv(String key, String aDefault)
   {
      String env = sanitizeEnvVarNAme(key);
      return getProperty(env, aDefault);
   }

   /**
    * Convert a property key into an environment-variable-style name:
    * dashes, dots and spaces become underscores and the result is upper-cased.
    *
    * @param key the raw key
    * @return the sanitized environment variable name
    */
   public static String sanitizeEnvVarNAme(String key)
   {
      // NOTE(review): method name typo ("NAme") is part of the public API, so it is preserved.
      return Text.replaceForRegExprWith(key, "[-\\. ]", "_").toUpperCase();
   }

   /**
    * Retrieves a configuration property as a String object.
    * Loads the file if not already initialized.
    *
    * @param key Key Name of the property to be returned.
    * @param aDefault the default value
    * @return Value of the property as a string or null if no property found.
    */
   public static String getProperty(String key, String aDefault)
   {
      return getSettings().getProperty(key, aDefault);
   }

   /**
    * Get a configuration property as an Integer object.
    *
    * @param aClass calling class
    * @param key the Key Name of the numeric property to be returned.
    * @param defaultValue the default value
    * @return Value of the property as an Integer or the default if no property found.
    */
   public static Integer getPropertyInteger(Class<?> aClass, String key, int defaultValue)
   {
      // Fix: previously ignored aClass and looked up the bare key only, unlike
      // every other class-qualified accessor in this class.
      return getSettings().getPropertyInteger(aClass, key, defaultValue);
   }

   /**
    * Get a configuration property as a Character object.
    *
    * @param aClass the class the property is related to
    * @param key the configuration name
    * @param defaultValue the default value to return if the property does not exist
    * @return the configuration character
    */
   public static Character getPropertyCharacter(Class<?> aClass, String key, char defaultValue)
   {
      return getSettings().getPropertyCharacter(aClass, key, defaultValue);
   }

   /**
    * Get a configuration property as an Integer object.
    *
    * @param key Name of the numeric property to be returned.
    * @return Value of the property as an Integer or null if no property found.
    */
   public static Integer getPropertyInteger(String key)
   {
      return getSettings().getPropertyInteger(key);
   }

   public static Integer getPropertyInteger(String key, int aDefault)
   {
      return getSettings().getPropertyInteger(key, aDefault);
   }

   /**
    * Get a double property
    *
    * @param cls the class associated with the property
    * @param key the property key name
    * @return the double property value
    */
   public static Double getPropertyDouble(Class<?> cls, String key)
   {
      return getSettings().getPropertyDouble(cls, key);
   }

   /**
    * Get a double property
    *
    * @param aClass the class associated with the property
    * @param key the property key name
    * @param defaultValue the default double property
    * @return the double property value
    */
   public static Double getPropertyDouble(Class<?> aClass, String key, double defaultValue)
   {
      // NOTE(review): aClass is ignored here — the lookup uses the bare key only.
      // Likely should delegate to a class-qualified Settings overload; confirm the
      // Settings API before changing, as no such overload is visible from here.
      return getSettings().getPropertyDouble(key, defaultValue);
   }

   /**
    * @param key the double key
    * @return the Double property
    */
   public static Double getPropertyDouble(String key)
   {
      return getSettings().getPropertyDouble(key);
   }

   public static Double getPropertyDouble(String key, double aDefault)
   {
      return getPropertyDouble(key, Double.valueOf(aDefault));
   }

   public static Double getPropertyDouble(String key, Double aDefault)
   {
      return getSettings().getPropertyDouble(key, aDefault);
   }

   public static Integer getPropertyInteger(Class<?> cls, String key)
   {
      return getSettings().getPropertyInteger(cls, key);
   }

   public static Integer getPropertyInteger(Class<?> cls, String key, Integer aDefault)
   {
      return getSettings().getPropertyInteger(cls, key, aDefault);
   }

   public static Integer getPropertyInteger(String key, Integer aDefault)
   {
      return getSettings().getPropertyInteger(key, aDefault);
   }

   /**
    * Get a configuration property as a Boolean object.
    *
    * @param key the Key Name of the property to be returned.
    * @return Value of the property as a Boolean or null if no property found.
    * Note that the value of the returned Boolean will be false if the
    * property sought after exists but is not equal to "true" (ignoring case).
    */
   public static Boolean getPropertyBoolean(String key)
   {
      return getSettings().getPropertyBoolean(key);
   }

   /**
    * @param key the property key
    * @param aBool the default boolean
    * @return property boolean
    */
   public static Boolean getPropertyBoolean(String key, Boolean aBool)
   {
      return getSettings().getPropertyBoolean(key, aBool);
   }

   /**
    * @param aClass the class name
    * @param key the configuration key
    * @param aBool default value
    * @return aBool if the configuration value for the key is blank
    */
   public static Boolean getPropertyBoolean(Class<?> aClass, String key, boolean aBool)
   {
      return getSettings().getPropertyBoolean(aClass, key, aBool);
   }

   /**
    * @param key the configuration key
    * @param aBool default value
    * @return aBool if the configuration value for the key is blank
    */
   public static Boolean getPropertyBoolean(String key, boolean aBool)
   {
      return getSettings().getPropertyBoolean(key, aBool);
   }

   /**
    * @param key the property key
    * @return the long property
    */
   public static Long getPropertyLong(String key)
   {
      return getSettings().getPropertyLong(key);
   }

   public static Long getPropertyLong(Class<?> aClass, String key, long aDefault)
   {
      return getSettings().getPropertyLong(aClass, key, aDefault);
   }

   public static Long getPropertyLong(Class<?> aClass, String key)
   {
      return getSettings().getPropertyLong(aClass, key);
   }

   public static Long getPropertyLong(String key, long aDefault)
   {
      return getSettings().getPropertyLong(key, aDefault);
   }

   public static Long getPropertyLong(String key, Long aDefault)
   {
      return getSettings().getPropertyLong(key, aDefault);
   }

   /**
    * Get a configuration property as a decrypted password.
    *
    * @param key Name of the property to be returned.
    * @return Value of the property as a char[] or null if no property found.
    */
   public static char[] getPropertyPassword(String key)
   {
      return getSettings().getPropertyPassword(key);
   }

   /**
    * @param key the property key
    * @return the property value parsed as a {@link Day}
    */
   public static Day getDay(String key)
   {
      return new Day(getProperty(key));
   }

   /**
    * Get an encrypted password
    *
    * @param key the key
    * @param defaultPassword the fallback password characters
    * @return the default password if no password exists in the configuration
    */
   public static char[] getPropertyPassword(String key, char... defaultPassword)
   {
      return getSettings().getPropertyPassword(key, defaultPassword);
   }

   /**
    * Get an encrypted password
    *
    * @param key the key
    * @param defaultPassword the fallback password text
    * @return the default password if no password exists in the configuration
    */
   public static char[] getPropertyPassword(String key, String defaultPassword)
   {
      return getSettings().getPropertyPassword(key, defaultPassword);
   }

   /**
    * Retrieve the password
    *
    * @param aClass the class name
    * @param key the configuration key
    * @param defaultPassword default value
    * @return defaultPassword if the configuration value for the key is blank
    */
   public static char[] getPropertyPassword(Class<?> aClass, String key, char[] defaultPassword)
   {
      return getSettings().getPropertyPassword(aClass, key, defaultPassword);
   }

   /**
    * @return a copy of the configured properties
    */
   public static Map<Object, Object> getProperties()
   {
      return getSettings().getProperties();
   }

   /**
    * Replace the configured properties, guarded by the class lock.
    *
    * @param properties the new properties
    * @throws ConfigLockException if the lock cannot be acquired within the lock period
    * @throws ConfigException if the waiting thread is interrupted
    */
   public static void setProperties(Properties properties)
   {
      try
      {
         if (lock.tryLock(lockPeriodMs, TimeUnit.MILLISECONDS))
         {
            try
            {
               getSettings().setProperties(properties);
            }
            finally
            {
               lock.unlock();
            }
         }
         else
         {
            throw new ConfigLockException("Setting properties");
         }
      }
      catch (InterruptedException e)
      {
         // Fix: previously swallowed with printStackTrace(), silently failing to set
         // the properties. Restore the interrupt status and fail loudly, consistent
         // with getSettings()/setSettings().
         Thread.currentThread().interrupt();
         throw new ConfigException(e);
      }
   }

   /**
    * @return System.getProperty("user.dir")
    */
   public static String getUserDir()
   {
      return System.getProperty("user.dir");
   }

   /**
    * @return System.getProperty("file.separator")
    */
   public static String getFileSeparator()
   {
      return System.getProperty("file.separator");
   }

   /**
    * Get (lazily creating) the shared settings instance.
    *
    * @return the shared {@link Settings}
    * @throws ConfigLockException if the lock cannot be acquired within the lock period
    * @throws ConfigException if the waiting thread is interrupted
    */
   public static Settings getSettings()
   {
      try
      {
         if (lock.tryLock(lockPeriodMs, TimeUnit.MILLISECONDS))
         {
            try
            {
               if (settings == null)
                  settings = new ConfigSettings();

               return settings;
            }
            finally
            {
               lock.unlock();
            }
         }
         else
         {
            throw new ConfigLockException("Get settings");
         }
      }
      catch (InterruptedException e)
      {
         Thread.currentThread().interrupt();
         throw new ConfigException(e);
      }
   }

   /**
    * Replace the shared settings instance.
    *
    * @param theSettings the new settings (required)
    * @throws IllegalArgumentException if theSettings is null
    * @throws ConfigLockException if the lock cannot be acquired within the lock period
    * @throws ConfigException if the waiting thread is interrupted
    */
   public static void setSettings(Settings theSettings)
   {
      // Validate before taking the lock so we never throw while holding it.
      if (theSettings == null)
         throw new IllegalArgumentException("theSettings is required");

      try
      {
         if (lock.tryLock(lockPeriodMs, TimeUnit.MILLISECONDS))
         {
            try
            {
               settings = theSettings;
            }
            finally
            {
               // Fix: the original acquired the lock but never released it, so any
               // later getSettings()/setProperties() call would time out with
               // ConfigLockException.
               lock.unlock();
            }
         }
         else
         {
            throw new ConfigLockException("Setting settings");
         }
      }
      catch (InterruptedException e)
      {
         Thread.currentThread().interrupt();
         throw new ConfigException(e);
      }
   }

   /**
    * Do environment variable name friendly configuration lookup
    *
    * @param key the environment variable key
    * @param properties the default properties
    * @return from properties or environment/configurations (null when blank)
    */
   public static String getPropertyEnv(String key, Map<?, ?> properties)
   {
      Object value = null;

      if (properties != null)
      {
         value = properties.get(key);
         if (value != null)
            return value.toString();
      }

      value = getPropertyEnv(key, "");

      String text = value.toString();
      if (text.length() == 0)
         return null;

      return text;
   }

   /**
    * Register an observer to be notified of settings changes.
    *
    * @param settingsObserver the observer to register
    */
   public static void registerObserver(SubjectObserver<Settings> settingsObserver)
   {
      getSettings().registerObserver(settingsObserver);
   }

   /**
    * @param key the property key
    * @return the property value parsed as a {@link Day}
    */
   public static Day getPropertyDay(String key)
   {
      return new Day(getProperty(key));
   }

   /**
    * Parse input arguments and add to configuration properties
    *
    * @param args the input arguments
    * @return the updated settings
    */
   public static Settings loadArgs(String[] args)
   {
      return getSettings().loadArgs(args);
   }

   /**
    * Lookup a property using a default if not found
    *
    * @param key other property key
    * @param properties the default props
    * @param defaultValue the default value to use if not found
    * @return the found property value
    */
   public static String getPropertyEnv(String key, Properties properties, String defaultValue)
   {
      String value = getPropertyEnv(key, properties);

      if (value == null || value.length() == 0)
         return defaultValue;

      return value;
   }

   public static <T> Class<T> getPropertyClass(String propertyKey)
   {
      return getSettings().getPropertyClass(propertyKey);
   }

   public static <T> Class<T> getPropertyClass(String propertyKey, Class<T> defautlClass)
   {
      return getSettings().getPropertyClass(propertyKey, defautlClass);
   }
}
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatExceptionOfType; import static org.openqa.selenium.testing.drivers.Browser.CHROME; import static org.openqa.selenium.testing.drivers.Browser.EDGE; import static org.openqa.selenium.testing.drivers.Browser.FIREFOX; import static org.openqa.selenium.testing.drivers.Browser.HTMLUNIT; import static org.openqa.selenium.testing.drivers.Browser.IE; import static org.openqa.selenium.testing.drivers.Browser.SAFARI; import org.junit.Test; import org.openqa.selenium.testing.JUnit4TestBase; import org.openqa.selenium.testing.NotYetImplemented; public class ClearTest extends JUnit4TestBase { @Test public void testWritableTextInputShouldClear() { driver.get(pages.readOnlyPage); WebElement element = driver.findElement(By.id("writableTextInput")); element.clear(); assertThat(element.getAttribute("value")).isEqualTo(""); } @Test public void testTextInputShouldNotClearWhenDisabled() { driver.get(pages.readOnlyPage); WebElement element = driver.findElement(By.id("textInputNotEnabled")); assertThat(element.isEnabled()).isFalse(); 
assertThatExceptionOfType(InvalidElementStateException.class) .isThrownBy(element::clear); } @Test public void testTextInputShouldNotClearWhenReadOnly() { driver.get(pages.readOnlyPage); WebElement element = driver.findElement(By.id("readOnlyTextInput")); assertThatExceptionOfType(InvalidElementStateException.class) .isThrownBy(element::clear); } @Test public void testWritableTextAreaShouldClear() { driver.get(pages.readOnlyPage); WebElement element = driver.findElement(By.id("writableTextArea")); element.clear(); assertThat(element.getAttribute("value")).isEqualTo(""); } @Test public void testTextAreaShouldNotClearWhenDisabled() { driver.get(pages.readOnlyPage); WebElement element = driver.findElement(By.id("textAreaNotEnabled")); assertThatExceptionOfType(InvalidElementStateException.class) .isThrownBy(element::clear); } @Test public void testTextAreaShouldNotClearWhenReadOnly() { driver.get(pages.readOnlyPage); WebElement element = driver.findElement(By.id("textAreaReadOnly")); assertThatExceptionOfType(InvalidElementStateException.class) .isThrownBy(element::clear); } @Test public void testContentEditableAreaShouldClear() { driver.get(pages.readOnlyPage); WebElement element = driver.findElement(By.id("content-editable")); element.clear(); assertThat(element.getText()).isEqualTo(""); } @Test public void shouldBeAbleToClearNoTypeInput() { shouldBeAbleToClearInput(By.name("no_type"), "input with no type"); } @Test public void shouldBeAbleToClearNumberInput() { shouldBeAbleToClearInput(By.name("number_input"), "42"); } @Test public void shouldBeAbleToClearEmailInput() { shouldBeAbleToClearInput(By.name("email_input"), "admin@localhost"); } @Test public void shouldBeAbleToClearPasswordInput() { shouldBeAbleToClearInput(By.name("password_input"), "qwerty"); } @Test public void shouldBeAbleToClearSearchInput() { shouldBeAbleToClearInput(By.name("search_input"), "search"); } @Test public void shouldBeAbleToClearTelInput() { 
shouldBeAbleToClearInput(By.name("tel_input"), "911"); } @Test public void shouldBeAbleToClearTextInput() { shouldBeAbleToClearInput(By.name("text_input"), "text input"); } @Test public void shouldBeAbleToClearUrlInput() { shouldBeAbleToClearInput(By.name("url_input"), "https://selenium.dev/"); } @Test @NotYetImplemented(HTMLUNIT) public void shouldBeAbleToClearRangeInput() { shouldBeAbleToClearInput(By.name("range_input"), "42", "50"); } @Test @NotYetImplemented(CHROME) @NotYetImplemented(EDGE) @NotYetImplemented(FIREFOX) @NotYetImplemented(IE) @NotYetImplemented(SAFARI) public void shouldBeAbleToClearCheckboxInput() { shouldBeAbleToClearInput(By.name("checkbox_input"), "Checkbox"); } @Test @NotYetImplemented(HTMLUNIT) @NotYetImplemented(IE) public void shouldBeAbleToClearColorInput() { shouldBeAbleToClearInput(By.name("color_input"), "#00ffff", "#000000"); } @Test @NotYetImplemented(HTMLUNIT) public void shouldBeAbleToClearDateInput() { shouldBeAbleToClearInput(By.name("date_input"), "2017-11-22"); } @Test public void shouldBeAbleToClearDatetimeInput() { shouldBeAbleToClearInput(By.name("datetime_input"), "2017-11-22T11:22"); } @Test @NotYetImplemented(HTMLUNIT) public void shouldBeAbleToClearDatetimeLocalInput() { shouldBeAbleToClearInput(By.name("datetime_local_input"), "2017-11-22T11:22"); } @Test @NotYetImplemented(HTMLUNIT) public void shouldBeAbleToClearTimeInput() { shouldBeAbleToClearInput(By.name("time_input"), "11:22"); } @Test @NotYetImplemented(HTMLUNIT) public void shouldBeAbleToClearMonthInput() { shouldBeAbleToClearInput(By.name("month_input"), "2017-11"); } @Test @NotYetImplemented(HTMLUNIT) public void shouldBeAbleToClearWeekInput() { shouldBeAbleToClearInput(By.name("week_input"), "2017-W47"); } private void shouldBeAbleToClearInput(By locator, String oldValue) { shouldBeAbleToClearInput(locator, oldValue, ""); } private void shouldBeAbleToClearInput(By locator, String oldValue, String clearedValue) { 
driver.get(appServer.whereIs("inputs.html")); WebElement element = driver.findElement(locator); assertThat(element.getAttribute("value")).isEqualTo(oldValue); element.clear(); assertThat(element.getAttribute("value")).isEqualTo(clearedValue); } }
package kbasesearchengine.test.integration;

import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import kbasesearchengine.common.FileUtil;

import org.apache.commons.io.FileUtils;
import org.apache.http.HttpHost;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import com.google.common.collect.ImmutableMap;
import com.mongodb.MongoClient;
import com.mongodb.client.MongoDatabase;

import kbasesearchengine.common.GUID;
import kbasesearchengine.events.handler.CloneableWorkspaceClientImpl;
import kbasesearchengine.events.handler.WorkspaceEventHandler;
import kbasesearchengine.events.storage.MongoDBStatusEventStorage;
import kbasesearchengine.events.storage.StatusEventStorage;
import kbasesearchengine.main.IndexerCoordinator;
import kbasesearchengine.main.IndexerWorker;
import kbasesearchengine.main.LineLogger;
import kbasesearchengine.search.ElasticIndexingStorage;
import kbasesearchengine.search.IndexingStorage;
import kbasesearchengine.search.ObjectData;
import kbasesearchengine.system.FileLister;
import kbasesearchengine.system.ObjectTypeParsingRulesFileParser;
import kbasesearchengine.system.SearchObjectType;
import kbasesearchengine.system.TypeFileStorage;
import kbasesearchengine.system.TypeMappingParser;
import kbasesearchengine.system.TypeStorage;
import kbasesearchengine.system.YAMLTypeMappingParser;
import kbasesearchengine.test.common.TestCommon;
import kbasesearchengine.test.controllers.elasticsearch.ElasticSearchController;
import kbasesearchengine.test.controllers.workspace.WorkspaceController;
import kbasesearchengine.test.data.TestDataLoader;
import us.kbase.auth.AuthToken;
import us.kbase.common.service.JsonClientException;
import us.kbase.common.service.UObject;
import us.kbase.common.test.controllers.mongo.MongoController;
import us.kbase.test.auth2.authcontroller.AuthController;
import us.kbase.workspace.CreateWorkspaceParams;
import us.kbase.workspace.GetObjects2Params;
import us.kbase.workspace.ObjectSaveData;
import us.kbase.workspace.ObjectSpecification;
import us.kbase.workspace.ProvenanceAction;
import us.kbase.workspace.RegisterTypespecParams;
import us.kbase.workspace.SaveObjectsParams;
import us.kbase.workspace.SubAction;
import us.kbase.workspace.WorkspaceClient;

/**
 * End-to-end integration test for the search indexer: starts real MongoDB,
 * auth, workspace, and Elasticsearch instances, wires an {@link IndexerWorker}
 * and {@link IndexerCoordinator} to them, saves workspace objects, and asserts
 * that the expected documents appear in the index.
 *
 * NOTE(review): tests wait a fixed 5s for events to propagate, so this class
 * is timing-sensitive; see the TODO below about a configurable wait time.
 */
public class IndexerIntegrationTest {

    // should add a setting in the worker and coordinator to shorten the wait time for testing
    // purposes

    // Shared controllers/clients set up once in prepare() and torn down in
    // tearDownClass(). All tests run against these shared services.
    private static AuthController auth = null;
    private static IndexerWorker worker = null;
    private static IndexerCoordinator coord = null;
    private static MongoController mongo;
    private static MongoClient mc;
    private static MongoDatabase db;
    private static ElasticSearchController es;
    private static IndexingStorage indexStorage;
    private static WorkspaceController ws;
    private static MongoDatabase wsdb;
    private static WorkspaceClient wsCli1;
    private static AuthToken userToken;

    private static Path tempDirPath;

    /**
     * Boots the whole stack in dependency order: temp dirs and type/mapping
     * files, then Mongo, auth, Elasticsearch, and the workspace service, and
     * finally the indexer worker and coordinator. The start order matters —
     * auth and the workspace both need Mongo, and the indexer needs everything.
     */
    @BeforeClass
    public static void prepare() throws Exception {
        TestCommon.stfuLoggers();

        tempDirPath = Paths.get(TestCommon.getTempDir()).resolve("IndexerIntegrationTest");
        // should refactor to just use NIO at some point
        FileUtils.deleteQuietly(tempDirPath.toFile());
        tempDirPath.toFile().mkdirs();
        // Install the search type specs and type mappings the indexer reads.
        final Path searchTypesDir = Files.createDirectories(tempDirPath.resolve("searchtypes"));
        installSearchTypes(searchTypesDir);
        final Path mappingsDir = Files.createDirectories(tempDirPath.resolve("searchmappings"));
        installSearchMappings(mappingsDir);

        // set up mongo
        mongo = new MongoController(
                TestCommon.getMongoExe(),
                tempDirPath,
                TestCommon.useWiredTigerEngine());
        mc = new MongoClient("localhost:" + mongo.getServerPort());
        final String eventDBName = "DataStatus";
        db = mc.getDatabase(eventDBName);

        // set up auth
        auth = new AuthController(
                TestCommon.getJarsDir(),
                "localhost:" + mongo.getServerPort(),
                "IndexerIntTestAuth",
                tempDirPath);
        final URL authURL = new URL("http://localhost:" + auth.getServerPort() + "/testmode");
        System.out.println("started auth server at " + authURL);
        TestCommon.createAuthUser(authURL, "user1", "display1");
        TestCommon.createAuthUser(authURL, "user2", "display2");
        final String token1 = TestCommon.createLoginToken(authURL, "user1");
        final String token2 = TestCommon.createLoginToken(authURL, "user2");
        // user1 saves objects; user2 is the workspace admin.
        userToken = new AuthToken(token1, "user1");
        final AuthToken wsadmintoken = new AuthToken(token2, "user2");

        // set up elastic search
        es = new ElasticSearchController(TestCommon.getElasticSearchExe(), tempDirPath);

        // set up Workspace
        ws = new WorkspaceController(
                TestCommon.getWorkspaceVersion(),
                TestCommon.getJarsDir(),
                "localhost:" + mongo.getServerPort(),
                "IndexerIntegTestWSDB",
                eventDBName,
                wsadmintoken.getUserName(),
                authURL,
                tempDirPath);
        System.out.println("Started workspace on port " + ws.getServerPort());
        wsdb = mc.getDatabase("IndexerIntegTestWSDB");

        URL wsUrl = new URL("http://localhost:" + ws.getServerPort());
        wsCli1 = new WorkspaceClient(wsUrl, userToken);
        wsCli1.setIsInsecureHttpConnectionAllowed(true);

        // Unique index prefix per run so reruns don't collide in ES.
        final String esIndexPrefix = "test_" + System.currentTimeMillis() + ".";
        final HttpHost esHostPort = new HttpHost("localhost", es.getServerPort());
        // Simple logger that forwards indexer output to stdout/stderr.
        final LineLogger logger = new LineLogger() {
            @Override
            public void logInfo(String line) {
                System.out.println(line);
            }
            @Override
            public void logError(String line) {
                System.err.println(line);
            }
            @Override
            public void logError(Throwable error) {
                error.printStackTrace();
            }
            @Override
            public void timeStat(GUID guid, long loadMs, long parseMs, long indexMs) {
            }
        };
        final Map<String, TypeMappingParser> parsers = ImmutableMap.of(
                "yaml", new YAMLTypeMappingParser());
        final TypeStorage ss = new TypeFileStorage(searchTypesDir, mappingsDir,
                new ObjectTypeParsingRulesFileParser(), parsers, new FileLister(), logger);

        final StatusEventStorage storage = new MongoDBStatusEventStorage(db);
        final WorkspaceClient wsClient = new WorkspaceClient(wsUrl, wsadmintoken);
        wsClient.setIsInsecureHttpConnectionAllowed(true);

        final WorkspaceEventHandler weh = new WorkspaceEventHandler(
                new CloneableWorkspaceClientImpl(wsClient));

        final ElasticIndexingStorage esStorage = new ElasticIndexingStorage(esHostPort,
                FileUtil.getOrCreateSubDir(tempDirPath.toFile(), "esbulk"));
        esStorage.setIndexNamePrefix(esIndexPrefix);
        indexStorage = esStorage;

        System.out.println("Creating indexer worker");
        File tempDir = tempDirPath.resolve("WorkerTemp").toFile();
        tempDir.mkdirs();
        worker = new IndexerWorker("test", Arrays.asList(weh), storage, indexStorage,
                ss, tempDir, logger, null, 1000);
        System.out.println("Starting indexer worker");
        worker.startIndexer();
        System.out.println("Creating indexer coordinator");
        coord = new IndexerCoordinator(storage, logger, 10);
        System.out.println("Starting indexer coordinator");
        coord.startIndexer();
        loadWSTypes(wsUrl, wsadmintoken);
    }

    /** Copies the search type spec files used by the tests into {@code target}. */
    private static void installSearchTypes(final Path target) throws IOException {
        installTestFile("EmptyAType.json", target);
        installTestFile("OneStringThreeKeyNames.yaml", target);
        installTestFile("TwoVersions.yaml", target);
        installTestFile("NoIndexingRules.yaml", target);
    }

    /** Writes the named test resource file into {@code target}. */
    private static void installTestFile(final String fileName, final Path target)
            throws IOException {
        final String file = TestDataLoader.load(fileName);
        Files.write(target.resolve(fileName), file.getBytes());
    }

    /** Copies the type mapping files used by the tests into {@code target}. */
    private static void installSearchMappings(final Path target) throws IOException {
        installTestFile("TwoVersionsMapping.yaml", target);
    }

    /**
     * Registers and releases the workspace type specs the tests save objects
     * against. Must run after the workspace service is up.
     */
    private static void loadWSTypes(final URL wsURL, final AuthToken wsadmintoken)
            throws Exception {
        final WorkspaceClient wc = new WorkspaceClient(wsURL, wsadmintoken);
        wc.setIsInsecureHttpConnectionAllowed(true);
        ownModule(wc, "Empty");
        ownModule(wc, "OneString");
        ownModule(wc, "TwoVersions");
        ownModule(wc, "TwoVersionsMapped");
        ownModule(wc, "NoIndexingRules");
        loadType(wc, "Empty", "Empty.spec", Arrays.asList("AType"));
        loadType(wc, "OneString", "OneString.spec", Arrays.asList("AType"));
        loadType(wc, "TwoVersions", "TwoVersions1.spec", Arrays.asList("Type"));
        loadType(wc, "TwoVersions", "TwoVersions2.spec", Collections.emptyList());
        loadType(wc, "TwoVersionsMapped", "TwoVersionsMapped1.spec", Arrays.asList("Type"));
        loadType(wc, "TwoVersionsMapped", "TwoVersionsMapped2.spec", Collections.emptyList());
        loadType(wc, "NoIndexingRules", "NoIndexingRules.spec", Arrays.asList("Type"));
    }

    /** Requests and (as admin) approves ownership of a workspace type module. */
    private static void ownModule(final WorkspaceClient wc, final String module)
            throws IOException, JsonClientException {
        wc.requestModuleOwnership(module);
        final Map<String, String> cmd = new HashMap<>();
        cmd.put("command", "approveModRequest");
        cmd.put("module", module);
        wc.administer(new UObject(cmd));
    }

    /** Registers a typespec resource for a module and releases the module. */
    private static void loadType(
            final WorkspaceClient wc,
            final String module,
            final String fileName,
            final List<String> types)
            throws IOException, JsonClientException {
        final String typespec = TestDataLoader.load(fileName);
        System.out.println(String.format("Loading type %s to workspace", module));
        wc.registerTypespec(new RegisterTypespecParams()
                .withDryrun(0L)
                .withSpec(typespec)
                .withNewTypes(types));
        System.out.println("released: " + wc.releaseModule(module));
    }

    /**
     * Stops the indexer and all controllers, deleting temp files when
     * configured to. Each controller is null-checked so a partial prepare()
     * failure still tears down what did start.
     */
    @AfterClass
    public static void tearDownClass() throws Exception {
        if (coord != null) {
            coord.stop(0);
        }
        if (worker != null) {
            worker.stop(0);
        }
        final boolean deleteTempFiles = TestCommon.getDeleteTempFiles();
        if (ws != null) {
            ws.destroy(deleteTempFiles);
        }
        if (auth != null) {
            auth.destroy(deleteTempFiles);
        }
        if (mc != null) {
            mc.close();
        }
        if (mongo != null) {
            mongo.destroy(deleteTempFiles);
        }
        if (es != null) {
            es.destroy(deleteTempFiles);
        }
        if (tempDirPath != null && tempDirPath.toFile().exists() && deleteTempFiles) {
            FileUtils.deleteQuietly(tempDirPath.toFile());
        }
    }

    /** Wipes the event DB, workspace DB, and index before each test. */
    @Before
    public void init() throws Exception {
        TestCommon.destroyDB(db);
        TestCommon.destroyDB(wsdb);
        indexStorage.dropData();
    }

    @Test
    public void singleNewVersionWithSourceTags() throws Exception {
        // a basic test to ensure all the indexer guts are working together.
        // also tests provenance and source tags
        wsCli1.createWorkspace(new CreateWorkspaceParams()
                .withWorkspace("foo")
                .withMeta(ImmutableMap.of("searchtags", "narrative, refdata")));
        wsCli1.saveObjects(new SaveObjectsParams()
                .withWorkspace("foo")
                .withObjects(Arrays.asList(new ObjectSaveData()
                        .withData(new UObject(ImmutableMap.of("whee", "wugga")))
                        .withName("bar")
                        .withType("Empty.AType-1.0")
                        .withProvenance(Arrays.asList(new ProvenanceAction()
                                .withService("serv")
                                .withMethod("meth")
                                .withServiceVer("servver")
                                .withSubactions(Arrays.asList(new SubAction()
                                        .withCommit("commit")
                                        .withName("serv.meth")
                                        ))
                                ))
                        ))
                );
        final long timestamp = getWSTimeStamp("1/1/1");

        System.out.println("waiting 5s for event to trickle through the system");
        Thread.sleep(5000); // wait for the indexer & worker to process the event
        final ObjectData indexedObj =
                indexStorage.getObjectsByIds(TestCommon.set(new GUID("WS:1/1/1"))).get(0);
        final Instant indexedTimestamp = indexedObj.getTimestamp().get();

        final ObjectData expected = ObjectData.getBuilder(new GUID("WS:1/1/1"))
                .withNullableObjectName("bar")
                .withNullableType(new SearchObjectType("EmptyAType", 1))
                .withNullableCreator(userToken.getUserName())
                .withNullableModule("serv")
                .withNullableMethod("meth")
                .withNullableCommitHash("commit")
                .withNullableModuleVersion("servver")
                .withNullableMD5("3c6e8d4dde8a26a0bfca203228cc6a36")
                .withNullableTimestamp(indexedTimestamp)
                .withNullableData(ImmutableMap.of("whee", "wugga"))
                .withKeyProperty("whee", "wugga")
                .withSourceTag("narrative")
                .withSourceTag("refdata")
                .build();

        assertThat("incorrect indexed object", indexedObj, is(expected));
        assertWSTimestampCloseToIndexedTimestamp(timestamp, indexedTimestamp);
    }

    @Test
    public void threeKeyNames() throws Exception {
        // tests that a spec with multiple keynames for the same field works.
        wsCli1.createWorkspace(new CreateWorkspaceParams().withWorkspace("foo"));
        wsCli1.saveObjects(new SaveObjectsParams()
                .withWorkspace("foo")
                .withObjects(Arrays.asList(new ObjectSaveData()
                        .withData(new UObject(ImmutableMap.of("foo", "bar")))
                        .withName("bar")
                        .withType("OneString.AType-1.0")
                        ))
                );
        final long timestamp = getWSTimeStamp("1/1/1");

        System.out.println("waiting 5s for event to trickle through the system");
        Thread.sleep(5000); // wait for the indexer & worker to process the event
        final ObjectData indexedObj =
                indexStorage.getObjectsByIds(TestCommon.set(new GUID("WS:1/1/1"))).get(0);
        final Instant indexedTimestamp = indexedObj.getTimestamp().get();

        // The spec indexes the single "foo" field under three key names.
        final ObjectData expected = ObjectData.getBuilder(new GUID("WS:1/1/1"))
                .withNullableObjectName("bar")
                .withNullableType(new SearchObjectType("OneString", 1))
                .withNullableCreator(userToken.getUserName())
                .withNullableMD5("9bb58f26192e4ba00f01e2e7b136bbd8")
                .withNullableTimestamp(indexedTimestamp)
                .withNullableData(ImmutableMap.of("foo", "bar"))
                .withKeyProperty("foo", "bar")
                .withKeyProperty("foo1", "bar")
                .withKeyProperty("foo2", "bar")
                .build();

        assertThat("incorrect indexed object", indexedObj, is(expected));
        assertWSTimestampCloseToIndexedTimestamp(timestamp, indexedTimestamp);
    }

    private void assertWSTimestampCloseToIndexedTimestamp(
            final long workspaceTimestamp,
            final Instant indexedTimestamp) {
        /* it turns out the ws provenance timestamp and obj_info timestamps are not identical
         * for a freshly saved object since the provenance is saved first. Furthermore, obj_info
         * timestamps have no millisecond info for backwards compatibility reasons
         * (should just return longs rather than strings in a UI revamp).
         * Hence, we just check if the provenance timestamp, for which we do have millisecond
         * information, is close the the elasticsearch timestamp, which comes from the timestamp
         * that's used to create the obj_info string.
         *
         * This timestamp is passed to the search service via the NEW_VERSION event, and so
         * has millisecond info.
         */
        TestCommon.assertCloseMS(
                Instant.ofEpochMilli(workspaceTimestamp), indexedTimestamp, 0, 100);
    }

    @Test
    public void twoVersionsWithoutMapping() throws Exception {
        // should always use the 2nd version of the spec for any ws type version
        // since there are no type mappings
        wsCli1.createWorkspace(new CreateWorkspaceParams()
                .withWorkspace("foo"));
        wsCli1.saveObjects(new SaveObjectsParams()
                .withWorkspace("foo")
                .withObjects(Arrays.asList(
                        new ObjectSaveData()
                                .withData(new UObject(ImmutableMap.of(
                                        "whee", "wugga",
                                        "whoo", "thingy",
                                        "req", "one")))
                                .withName("obj1")
                                .withType("TwoVersions.Type-1.0"),
                        new ObjectSaveData()
                                .withData(new UObject(ImmutableMap.of(
                                        "whee", "whug",
                                        "whoo", "gofasterstripes",
                                        "req", 1)))
                                .withName("obj2")
                                .withType("TwoVersions.Type-2.0")
                        ))
                );
        final long timestamp1 = getWSTimeStamp("1/1/1");
        final long timestamp2 = getWSTimeStamp("1/2/1");

        System.out.println("waiting 5s for events to trickle through the system");
        Thread.sleep(5000); // wait for the indexer & worker to process the event
        final ObjectData indexedObj1 =
                indexStorage.getObjectsByIds(TestCommon.set(new GUID("WS:1/1/1"))).get(0);
        final Instant indexedTimestamp1 = indexedObj1.getTimestamp().get();
        final ObjectData indexedObj2 =
                indexStorage.getObjectsByIds(TestCommon.set(new GUID("WS:1/2/1"))).get(0);
        final Instant indexedTimestamp2 = indexedObj2.getTimestamp().get();

        // Both objects are indexed with search type version 2 regardless of
        // the workspace type version they were saved with.
        final ObjectData expected1 = ObjectData.getBuilder(new GUID("WS:1/1/1"))
                .withNullableObjectName("obj1")
                .withNullableType(new SearchObjectType("TwoVers", 2))
                .withNullableCreator(userToken.getUserName())
                .withNullableMD5("d20dd9b7a7cd69471b2b13ae7593de90")
                .withNullableTimestamp(indexedTimestamp1)
                .withNullableData(ImmutableMap.of("whee", "wugga", "whoo", "thingy"))
                .withKeyProperty("whee", "wugga")
                .withKeyProperty("whoo", "thingy")
                .build();

        final ObjectData expected2 = ObjectData.getBuilder(new GUID("WS:1/2/1"))
                .withNullableObjectName("obj2")
                .withNullableType(new SearchObjectType("TwoVers", 2))
                .withNullableCreator(userToken.getUserName())
                .withNullableMD5("51368afbd22bcf7987b98ca28607c67d")
                .withNullableTimestamp(indexedTimestamp2)
                .withNullableData(ImmutableMap.of("whee", "whug", "whoo", "gofasterstripes"))
                .withKeyProperty("whee", "whug")
                .withKeyProperty("whoo", "gofasterstripes")
                .build();

        assertThat("incorrect indexed object", indexedObj1, is(expected1));
        assertWSTimestampCloseToIndexedTimestamp(timestamp1, indexedTimestamp1);
        assertThat("incorrect indexed object", indexedObj2, is(expected2));
        assertWSTimestampCloseToIndexedTimestamp(timestamp2, indexedTimestamp2);
    }

    @Test
    public void twoVersionsWithMapping() throws Exception {
        wsCli1.createWorkspace(new CreateWorkspaceParams()
                .withWorkspace("foo"));
        wsCli1.saveObjects(new SaveObjectsParams()
                .withWorkspace("foo")
                .withObjects(Arrays.asList(
                        new ObjectSaveData()
                                .withData(new UObject(ImmutableMap.of(
                                        "whee", "wugga",
                                        "whoo", "thingy",
                                        "req", "one")))
                                .withName("obj1")
                                .withType("TwoVersionsMapped.Type-1.0"),
                        new ObjectSaveData()
                                .withData(new UObject(ImmutableMap.of(
                                        "whee", "whug",
                                        "whoo", "gofasterstripes",
                                        "req", 1)))
                                .withName("obj2")
                                .withType("TwoVersionsMapped.Type-2.0")
                        ))
                );
        final long timestamp1 = getWSTimeStamp("1/1/1");
        final long timestamp2 = getWSTimeStamp("1/2/1");

        System.out.println("waiting 5s for events to trickle through the system");
        Thread.sleep(5000); // wait for the indexer & worker to process the event
        final ObjectData indexedObj1 =
                indexStorage.getObjectsByIds(TestCommon.set(new GUID("WS:1/1/1"))).get(0);
        final Instant indexedTimestamp1 = indexedObj1.getTimestamp().get();
        final ObjectData indexedObj2 =
                indexStorage.getObjectsByIds(TestCommon.set(new GUID("WS:1/2/1"))).get(0);
        final Instant indexedTimestamp2 = indexedObj2.getTimestamp().get();

        // With a type mapping, each workspace type version maps to its own
        // search spec version (1 -> 1, 2 -> 2).
        final ObjectData expected1 = ObjectData.getBuilder(new GUID("WS:1/1/1"))
                .withNullableObjectName("obj1")
                .withNullableType(new SearchObjectType("TwoVers", 1))
                .withNullableCreator(userToken.getUserName())
                .withNullableMD5("d20dd9b7a7cd69471b2b13ae7593de90")
                .withNullableTimestamp(indexedTimestamp1)
                .withNullableData(ImmutableMap.of("whee", "wugga"))
                .withKeyProperty("whee", "wugga")
                .build();

        final ObjectData expected2 = ObjectData.getBuilder(new GUID("WS:1/2/1"))
                .withNullableObjectName("obj2")
                .withNullableType(new SearchObjectType("TwoVers", 2))
                .withNullableCreator(userToken.getUserName())
                .withNullableMD5("51368afbd22bcf7987b98ca28607c67d")
                .withNullableTimestamp(indexedTimestamp2)
                .withNullableData(ImmutableMap.of("whee", "whug", "whoo", "gofasterstripes"))
                .withKeyProperty("whee", "whug")
                .withKeyProperty("whoo", "gofasterstripes")
                .build();

        assertThat("incorrect indexed object", indexedObj1, is(expected1));
        assertWSTimestampCloseToIndexedTimestamp(timestamp1, indexedTimestamp1);
        assertThat("incorrect indexed object", indexedObj2, is(expected2));
        assertWSTimestampCloseToIndexedTimestamp(timestamp2, indexedTimestamp2);
    }

    @Test
    public void noIndexingRules() throws Exception {
        // tests that a search spec without any indexing rules still indexes the general object
        // properties
        wsCli1.createWorkspace(new CreateWorkspaceParams()
                .withWorkspace("foo"));
        wsCli1.saveObjects(new SaveObjectsParams()
                .withWorkspace("foo")
                .withObjects(Arrays.asList(
                        new ObjectSaveData()
                                .withData(new UObject(ImmutableMap.of(
                                        "whee", "wugga",
                                        "whoo", "thingy",
                                        "req", "one")))
                                .withName("obj1")
                                .withType("NoIndexingRules.Type-1.0")
                        ))
                );
        final long timestamp = getWSTimeStamp("1/1/1");

        System.out.println("waiting 5s for events to trickle through the system");
        Thread.sleep(5000); // wait for the indexer & worker to process the event
        final ObjectData indexedObj =
                indexStorage.getObjectsByIds(TestCommon.set(new GUID("WS:1/1/1"))).get(0);
        final Instant indexedTimestamp = indexedObj.getTimestamp().get();

        // No key properties or data expected: the spec has no indexing rules.
        final ObjectData expected = ObjectData.getBuilder(new GUID("WS:1/1/1"))
                .withNullableObjectName("obj1")
                .withNullableType(new SearchObjectType("NoIndexRules", 1))
                .withNullableCreator(userToken.getUserName())
                .withNullableMD5("d20dd9b7a7cd69471b2b13ae7593de90")
                .withNullableTimestamp(indexedTimestamp)
                .build();

        assertThat("incorrect indexed object", indexedObj, is(expected));
        assertWSTimestampCloseToIndexedTimestamp(timestamp, indexedTimestamp);
    }

    /**
     * Returns the epoch-ms save timestamp of the given workspace object ref,
     * fetched without object data.
     */
    private long getWSTimeStamp(final String ref) throws IOException, JsonClientException {
        return wsCli1.getObjects2(new GetObjects2Params()
                .withNoData(1L)
                .withObjects(Arrays.asList(new ObjectSpecification().withRef(ref))))
                .getData().get(0).getEpoch();
    }
}
package Game;

import java.util.ArrayList;
import java.util.List;

import Exceptions.InvalidHandException;

/**
 * PokerHand is a class that stores a player's hand and determines the best
 * hand that can be made with the player's given cards.
 *
 * The hand is kept sorted in descending rank order by {@link #addCard}, and
 * the is* evaluation methods both test for a hand and reorder {@code hand}
 * so the winning cards occupy the leading indices. The evaluators are
 * order-dependent and are meant to be driven by {@link #bestHand()}.
 *
 * @author Zach
 */
public class PokerHand {

	// Texas Hold 'Em: 2 pocket cards + 5 community cards.
	private static final int MAX_HAND_SIZE = 7;

	private List<Card> hand;
	private Card[] pocketCards; //Cards dealt to this player only
	private String bestHand; //Name of the best hand that can be made with given cards
	private int bestHandValue; //ranked from 0-9 with 9 being the best (0 = high card)
	private boolean folded;
	private boolean winner;

	/**
	 * Constructor that initializes the pocket cards, best hand, best hand value,
	 * folded indication and winning indication of the given hand.
	 */
	public PokerHand () {
		hand = new ArrayList<Card>();
		pocketCards = new Card[2];
		bestHand = "";
		bestHandValue = 0;
		folded = false;
		winner = false;
	}

	/**
	 * Adds a new Card to the Poker Hand, keeping the hand sorted in descending
	 * rank order. The first two cards added are also recorded as the pocket
	 * cards. Throws an InvalidHandException in the instance that the new card
	 * added exceeds the maximum number of cards allowed by a standard Texas
	 * Hold 'Em hand.
	 *
	 * @param nextCard the card to insert
	 * @throws InvalidHandException if the hand already holds 7 cards
	 */
	public void addCard(Card nextCard) {
		if(hand.size() >= MAX_HAND_SIZE)
			throw new InvalidHandException("The poker hand cannot exceed 7 cards");
		if(hand.isEmpty()) {
			hand.add(nextCard);
			pocketCards[0] = nextCard;
		}
		else {
			// The second card added is the player's second pocket card.
			if(hand.size() < 2)
				pocketCards[1] = nextCard;
			// Insertion sort: find the first card ranked <= nextCard.
			int indexToAddAt = 0;
			while(indexToAddAt < hand.size()
					&& nextCard.getRankValue() < hand.get(indexToAddAt).getRankValue()) {
				indexToAddAt++;
			}
			if(indexToAddAt == hand.size())
				hand.add(nextCard);
			else
				hand.add(indexToAddAt, nextCard);
		}
	}

	/**
	 * Sets the folded condition of the Poker Hand
	 *
	 * @param folded
	 */
	public void fold(boolean folded) {
		this.folded = folded;
	}

	/**
	 * Returns the folded condition of the Poker Hand.
	 *
	 * @return folded
	 */
	public boolean hasFolded() {
		return folded;
	}

	/**
	 * Returns the Poker Hand
	 *
	 * @return hand
	 */
	public List<Card> getHand() {
		return hand;
	}

	/**
	 * Initializes the best hand possible given the cards contained by the hand.
	 */
	public void setBestHand() {
		bestHand = bestHand();
	}

	/**
	 * Returns the string representation of the best hand that can be made with
	 * the given cards provided in the hand.
	 *
	 * @return bestHand
	 */
	public String getBestHand() {
		return bestHand;
	}

	/**
	 * Returns the numerical representation of the best hand that can be made
	 * with the given cards.
	 *
	 * @return bestHandValue
	 */
	public int getBestHandValue() {
		return bestHandValue;
	}

	/**
	 * sets the winning indication of the hand.
	 *
	 * @param winner
	 */
	public void setWinner(boolean winner) {
		this.winner = winner;
	}

	/**
	 * Returns the winning indication of the hand.
	 *
	 * @return winner
	 */
	public boolean isWinner() {
		return winner;
	}

	/**
	 * Returns an array of Cards containing the Cards dealt to the player
	 * that can only be used by the player to construct the best hand.
	 *
	 * @return pocketCards
	 */
	public Card[] getPocketCards() {
		return pocketCards;
	}

	/**
	 * Returns a string representation of the best hand that can be made with the
	 * given set of cards, and sets the numerical valuation of the best hand that
	 * can be made with the given set of cards.
	 *
	 * The checks are ordered so each evaluator can rely on the reordering done
	 * by the previous one (e.g. isStraightFlush assumes isStraight already
	 * moved the straight to indices 0-4).
	 *
	 * @return bestHand
	 * @throws InvalidHandException if the hand holds fewer than two cards
	 */
	private String bestHand() {
		if(hand.size() < 2)
			throw new InvalidHandException("The hand must contain at least two cards");
		if(isStraight()) {
			if(isStraightFlush()) {
				if(isRoyalFlush()) {
					bestHandValue = 9;
					return "Royal Flush!!!";
				}
				bestHandValue = 8;
				return "Straight Flush";
			}
			bestHandValue = 4;
			return "Straight";
		}
		if(isFlush()) {
			bestHandValue = 5;
			return "Flush";
		}
		if(isPair()) {
			if(isTwoPair()) {
				if(isFullHouse()) {
					if(isFourOfAKind()) {
						bestHandValue = 7;
						return "Four of a Kind";
					}
					bestHandValue = 6;
					return "Full House";
				}
				bestHandValue = 2;
				return "Two Pair";
			}
			if(isThreeOfAKind()) {
				if(isFourOfAKind()) {
					bestHandValue = 7;
					return "Four of a Kind";
				}
				bestHandValue = 3;
				return "Three of a Kind";
			}
			bestHandValue = 1;
			return "Pair";
		}
		bestHandValue = 0;
		return "High Card";
	}

	/**
	 * Returns an indication that the best hand that can be made with the given
	 * set of cards is a straight and rearranges the set of cards to indicate
	 * the straight (moving the straight's five cards to indices 0-4).
	 *
	 * NOTE(review): this scans for 5 strictly consecutive ranks in the sorted
	 * hand, so a duplicated rank inside the run (a pair) appears to break the
	 * straight, and the ace-low "wheel" (A-2-3-4-5) is not detected — confirm
	 * whether these cases are intended to be supported.
	 *
	 * @return isStraight
	 */
	private boolean isStraight() {
		if(hand.size() < 5)
			return false;
		int currIndex = 0;
		boolean isStraight = false;
		//A straight must be 5 cards long and the max size of a Poker Hand is 7 Cards
		while(!isStraight && currIndex < 3) {
			isStraight = (hand.get(currIndex).getRankValue()
					- hand.get(currIndex+1).getRankValue() == 1);
			int i = currIndex;
			//Iterate through the next 4 cards to see if they're sequential
			while(isStraight && i < (currIndex+3)) {
				try {
					i++;
					isStraight = (hand.get(i).getRankValue()
							- hand.get(i+1).getRankValue() == 1);
				}
				catch (IndexOutOfBoundsException e) {
					// Ran off the end of the hand before completing 5 cards.
					return false;
				}
			}
			currIndex++;
		}
		currIndex--; // undo the final increment so currIndex is the straight's start
		//move the straight to indexes 0-4
		if(isStraight) {
			List<Card> straight = new ArrayList<>();
			for(int i = currIndex; i <= currIndex+4; i++) {
				hand.get(i).setIsUsed(true);
				straight.add(hand.get(i));
			}
			// Append the unused cards after the straight, preserving order.
			for(int i = 0; i < hand.size(); i++) {
				if(!hand.get(i).getIsUsed())
					straight.add(hand.get(i));
			}
			hand = straight;
		}
		return isStraight;
	}

	/**
	 * Returns an indication that the best hand that can be made with the given
	 * set of cards is a flush and rearranges the set of cards to indicate
	 * the flush (flush cards moved to the leading indices, descending rank).
	 *
	 * @return isFlush
	 */
	private boolean isFlush() {
		if(hand.size() < 5)
			return false;
		int spadeCount = 0, heartCount = 0, clubCount = 0, diamondCount = 0;
		//Determine if there are 5 cards of any suit
		for(int i = 0; i < hand.size(); i++) {
			switch(hand.get(i).getSuit()) {
				case 's':
					spadeCount++;
					break;
				case 'h':
					heartCount++;
					break;
				case 'c':
					clubCount++;
					break;
				case 'd':
					diamondCount++;
					break;
			}
		}
		boolean isFlush = ((spadeCount >= 5) || (heartCount >= 5)
				|| (clubCount >= 5) || (diamondCount >= 5));
		//Determine the suit of the flush
		if(isFlush) {
			char flushSuit;
			// At most one suit can reach 5 in a 7-card hand, so the first
			// matching count identifies the flush suit.
			if(spadeCount >= 5)
				flushSuit = 's';
			else if(heartCount >= 5)
				flushSuit = 'h';
			else if(clubCount >= 5)
				flushSuit = 'c';
			else
				flushSuit = 'd';
			List<Card> flush = new ArrayList<>();
			//Move the flush to indices 0-4 in descending order
			for(int i = 0; i < hand.size(); i++) {
				if(hand.get(i).getSuit() == flushSuit) {
					hand.get(i).setIsUsed(true);
					flush.add(hand.get(i));
				}
			}
			for(int i = 0; i < hand.size(); i++) {
				if(!hand.get(i).getIsUsed()) {
					flush.add(hand.get(i));
				}
			}
			hand = flush;
		}
		return isFlush;
	}

	/**
	 * Returns an indication that the best hand that can be made with the given
	 * set of cards is a straight flush and rearranges the set of cards to
	 * indicate the straight flush.
	 *
	 * Precondition: isStraight() has already run and placed a straight at
	 * indices 0-4 — this method reads those positions directly.
	 *
	 * @return isStraightFlush
	 */
	private boolean isStraightFlush() {
		int currCard = 1;
		int lastCardOfStraight = 4;
		boolean isStraightFlush =
				hand.get(currCard).getSuit() == hand.get(currCard-1).getSuit();
		currCard++;
		//Check to see if the hand is a straight flush given it's current order
		while(isStraightFlush && currCard <= lastCardOfStraight) {
			isStraightFlush = hand.get(currCard).getSuit() == hand.get(currCard-1).getSuit();
			currCard++;
		}
		currCard--; // index of the first card that broke the suit run (if any)
		if(isStraightFlush)
			return isStraightFlush;
		else {
			if(lastCardOfStraight == hand.size()-1)
				return isStraightFlush;
			//Check to see if one of the Cards in the straight is paired with the card that
			//would complete the straight flush and then swap them if so.
			int unusedCards = lastCardOfStraight+1;
			while(!isStraightFlush && unusedCards < hand.size()) {
				if(hand.get(unusedCards).getRank() == hand.get(currCard).getRank()) {
					isStraightFlush =
							hand.get(unusedCards).getRank() == hand.get(currCard).getRank();
				}
				unusedCards++;
			}
			unusedCards--;
			if(!isStraightFlush)
				return isStraightFlush;
			else {
				//recursively determine if the hand is a straight flush
				// NOTE(review): only the rank is compared above, not the suit,
				// before swapping the duplicate in — verify the recursion
				// cannot loop when the duplicate is also off-suit.
				hand.get(currCard).setIsUsed(false);
				hand.get(unusedCards).setIsUsed(true);
				switchSets(currCard, 1, unusedCards, 1);
				return(isStraightFlush());
			}
		}
	}

	/**
	 * Returns an indication that the best hand that can be made with the given
	 * set of cards is a royal flush.
	 *
	 * Precondition: isStraight()/isStraightFlush() have already placed the
	 * straight flush at indices 0-4 in descending rank, so an ace at index 0
	 * implies A-K-Q-J-10.
	 *
	 * @return isRoyalFlush
	 */
	private boolean isRoyalFlush() {
		return hand.get(0).getRank() == 'A';
	}

	/**
	 * @return Result of whether or not the PokerHand contains a pair
	 */
	private boolean isPair() {
		int startingIndex = 0;
		// indexOfNextPair/shiftNextPair are defined elsewhere in this class;
		// the pair is shifted to the front of the hand when found.
		int nextPairIndex = indexOfNextPair(startingIndex);
		if(nextPairIndex != -1)
			shiftNextPair(nextPairIndex);
		return nextPairIndex != -1;
	}

	/**
	 * Returns an indication that the best hand that can be made with the given
	 * set of cards is a three of a kind and rearranges the set of cards to
	 * indicate the three of a kind.
	 *
	 * Precondition: isPair() has already shifted a pair to indices 0-1.
	 *
	 * @return isThreeOfAKind
	 */
	private boolean isThreeOfAKind() {
		if(hand.size() < 3)
			return false;
		int pairLastIndex = 0;
		int thirdMatchIndex = indexOfNextMatch(pairLastIndex);
		if(thirdMatchIndex != -1)
			shiftNextMatch(pairLastIndex, thirdMatchIndex);
		return thirdMatchIndex != -1;
	}

	/**
	 * Returns an indication that the best hand that can be made with the given
	 * set of cards is a four of a kind and rearranges the set of cards to
	 * indicate the four of a kind.
	 *
	 * Precondition: a three of a kind already occupies indices 0-2.
	 *
	 * @return isFourOfAKind
	 */
	private boolean isFourOfAKind() {
		if(hand.size() < 4)
			return false;
		int threeOfKindLastIndex = 1;
		int fourthMatchIndex = indexOfNextMatch(threeOfKindLastIndex);
		if(fourthMatchIndex != -1)
			shiftNextMatch(threeOfKindLastIndex, fourthMatchIndex);
		return fourthMatchIndex != -1;
	}

	/**
	 * Returns an indication that the best hand that can be made with the given
	 * set of cards is a two pair and rearranges the set of cards to indicate
	 * the two pair (higher pair first).
	 *
	 * Precondition: isPair() has already shifted the first pair to indices 0-1.
	 *
	 * @return isTwoPair
	 */
	private boolean isTwoPair() {
		if(hand.size() < 4)
			return false;
		int startingIndex = 2;
		int secondPairIndex = indexOfNextPair(startingIndex);
		if(secondPairIndex != -1) {
			// Same rank as the first pair means trips/quads, not two pair.
			if(hand.get(startingIndex-1).getRankValue()
					== hand.get(secondPairIndex).getRankValue())
				return false;
			shiftNextPair(secondPairIndex);
			// Keep the higher-ranked pair in front.
			if(hand.get(0).getRankValue() < hand.get(startingIndex).getRankValue())
				switchSets(0, 2, startingIndex, 2);
		}
		return secondPairIndex != -1;
	}

	/**
	 * Returns an indication that the best hand that can be made with the given
	 * set of cards is a flush and rearranges the set of cards to indicate
	 * the flush.
* * @return isFullHouse */ private boolean isFullHouse() { if(hand.size() < 5) return false; int pairOneIndex = 0; int pairTwoIndex = 2; int pairThreeIndex = indexOfNextPair(pairTwoIndex + 2); int indexOfTripOne = indexOfNextMatch(pairOneIndex); int indexOfTripTwo = indexOfNextMatch(pairTwoIndex); if(indexOfTripOne != -1) { shiftNextMatch(pairOneIndex, indexOfTripOne); return indexOfTripOne != -1; } else if(indexOfTripTwo != -1) { shiftNextMatch(pairTwoIndex, indexOfTripTwo); switchSets(pairOneIndex, 2, pairTwoIndex, 3); return indexOfTripTwo != -1; } else if(pairThreeIndex != -1) { int indexOfTripThree = indexOfNextMatch(pairThreeIndex); if(indexOfTripThree != -1) { switchSets(pairTwoIndex, 2, pairThreeIndex, 3); pairThreeIndex = pairTwoIndex; //Since set two and three switched positions switchSets(pairOneIndex, 2, pairThreeIndex, 3); return indexOfTripThree != -1; } } return false; } /** * Returns the index that the next pair starts at proceeding the index * provided as a parameter. * * @param startingIndex * @return First index of the next Pair */ private int indexOfNextPair(int startingIndex) { if(startingIndex == (hand.size()-1)) return -1; boolean foundPair = hand.get(startingIndex).getRankValue() == hand.get(startingIndex+1).getRankValue(); int currIndex = startingIndex+1; //Becuase the hand is sorted in descending order, it is safe to assume pairs will be together while(!foundPair && currIndex < hand.size()-1) { foundPair = hand.get(currIndex).getRankValue() == hand.get(currIndex+1).getRankValue(); currIndex++; } if(foundPair) return (currIndex-1); else return -1; } /** * Moves the pair beginning at the given index, provided by the parameter, * to the next available position in the Poker Hand. 
* * @param indexToShiftFrom */ private void shiftNextPair(int indexToShiftFrom) { int currIndex = 0; //find the next available position to shift the next pair to //Any pairs prior to this pair will be of higher rank because the cards or ordered descending while(hand.get(currIndex).getIsUsed()) currIndex++; List<Card> tempHand = new ArrayList<>(); //Add all previous sets to retain order for(int i = 0; i < currIndex; i++) tempHand.add(hand.get(i)); hand.get(indexToShiftFrom).setIsUsed(true); tempHand.add(hand.get(indexToShiftFrom)); hand.get(indexToShiftFrom+1).setIsUsed(true); tempHand.add(hand.get(indexToShiftFrom+1)); //Add all the kickers in their descending order while(currIndex < hand.size()) { if(!hand.get(currIndex).getIsUsed()) tempHand.add(hand.get(currIndex)); currIndex++; } hand = tempHand; } /** * Returns the index of the next card whose rank matches the rank of the * provided parameter index; otherwise returns -1. * * @param lastMatched * @return currIndex */ private int indexOfNextMatch(int lastMatched) { if(lastMatched == hand.size()-2) return -1; lastMatched++; int currIndex = lastMatched+1; boolean hasNextMatch = hand.get(currIndex).getRankValue() == hand.get(lastMatched).getRankValue(); currIndex++; while(!hasNextMatch && currIndex < hand.size()) { hasNextMatch = hand.get(currIndex).getRankValue() == hand.get(lastMatched).getRankValue(); currIndex++; } currIndex--; if(hasNextMatch) return currIndex; else return -1; } /** * Shifts the card at indexOfNextMatch to one position after the index of * the lastMatched Card. 
* * @param lastMatched * @param indexOfNextMatch */ private void shiftNextMatch(int lastMatched, int indexOfNextMatch) { lastMatched++; List<Card> tempHand = new ArrayList<>(); for(int i = 0; i <= lastMatched; i++) tempHand.add(hand.get(i)); hand.get(indexOfNextMatch).setIsUsed(true); tempHand.add(hand.get(indexOfNextMatch)); lastMatched++; for(int i = lastMatched; i < hand.size(); i++) { if(i != indexOfNextMatch) tempHand.add(hand.get(i)); } hand = tempHand; } /** * Switches the first set of matching Cards with the second set of matching cards. * * @param setOneIndex * @param setOneLength * @param setTwoIndex * @param setTwoLength */ private void switchSets(int setOneIndex, int setOneLength, int setTwoIndex, int setTwoLength) { List<Card> tempHand = new ArrayList<>(); int currIndex = 0; //Add all cards before the first set while(currIndex < setOneIndex) { tempHand.add(hand.get(currIndex)); currIndex++; } //Add the second set to where the first set is currently positioned for(int i = 0; i < setTwoLength; i++) { tempHand.add(hand.get(setTwoIndex+i)); currIndex++; } //Add all cards between the first and second set while(currIndex < setTwoIndex + (setTwoLength - setOneLength)) { tempHand.add(hand.get(currIndex)); currIndex++; } //Add set one where set two was originally positioned for(int i = 0; i < setOneLength; i++) { tempHand.add(hand.get(setOneIndex+i)); currIndex++; } //Add the remaining Cards positioned after the second set while(currIndex < hand.size()) { tempHand.add(hand.get(currIndex)); currIndex++; } hand = tempHand; } @Override public String toString() { StringBuilder handToString = new StringBuilder(); for(int i = 0; i < hand.size(); i++) { handToString.append(hand.get(i)); handToString.append(" "); } handToString.append(bestHand); if(isWinner()) { handToString.append(" "); handToString.append("WINNER!"); } return handToString.toString(); } }
/*
 * This file is generated by jOOQ.
 */
// NOTE(review): generated source — do not hand-edit; changes will be lost on the
// next jOOQ codegen run. Fix the schema or the generator configuration instead.
package com.oneops.crawler.jooq.cms.tables;


import com.oneops.crawler.jooq.cms.Indexes;
import com.oneops.crawler.jooq.cms.Keys;
import com.oneops.crawler.jooq.cms.Kloopzcm;
import com.oneops.crawler.jooq.cms.tables.records.DjRfcRelationRecord;

import java.sql.Timestamp;
import java.util.Arrays;
import java.util.List;

import javax.annotation.Generated;

import org.jooq.Field;
import org.jooq.ForeignKey;
import org.jooq.Index;
import org.jooq.Name;
import org.jooq.Schema;
import org.jooq.Table;
import org.jooq.TableField;
import org.jooq.UniqueKey;
import org.jooq.impl.DSL;
import org.jooq.impl.TableImpl;


/**
 * This class is generated by jOOQ.
 */
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.10.0"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class DjRfcRelation extends TableImpl<DjRfcRelationRecord> {

    private static final long serialVersionUID = 416337632;

    /**
     * The reference instance of <code>kloopzcm.dj_rfc_relation</code>
     */
    public static final DjRfcRelation DJ_RFC_RELATION = new DjRfcRelation();

    /**
     * The class holding records for this type
     */
    @Override
    public Class<DjRfcRelationRecord> getRecordType() {
        return DjRfcRelationRecord.class;
    }

    /**
     * The column <code>kloopzcm.dj_rfc_relation.rfc_id</code>.
     */
    public final TableField<DjRfcRelationRecord, Long> RFC_ID = createField("rfc_id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.release_id</code>.
     */
    public final TableField<DjRfcRelationRecord, Long> RELEASE_ID = createField("release_id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.ns_id</code>.
     */
    public final TableField<DjRfcRelationRecord, Long> NS_ID = createField("ns_id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.ci_relation_id</code>.
     */
    public final TableField<DjRfcRelationRecord, Long> CI_RELATION_ID = createField("ci_relation_id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.from_rfc_id</code>.
     */
    public final TableField<DjRfcRelationRecord, Long> FROM_RFC_ID = createField("from_rfc_id", org.jooq.impl.SQLDataType.BIGINT, this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.from_ci_id</code>.
     */
    public final TableField<DjRfcRelationRecord, Long> FROM_CI_ID = createField("from_ci_id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.relation_id</code>.
     */
    public final TableField<DjRfcRelationRecord, Integer> RELATION_ID = createField("relation_id", org.jooq.impl.SQLDataType.INTEGER.nullable(false), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.relation_goid</code>.
     */
    public final TableField<DjRfcRelationRecord, String> RELATION_GOID = createField("relation_goid", org.jooq.impl.SQLDataType.VARCHAR(256).nullable(false), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.to_rfc_id</code>.
     */
    public final TableField<DjRfcRelationRecord, Long> TO_RFC_ID = createField("to_rfc_id", org.jooq.impl.SQLDataType.BIGINT, this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.to_ci_id</code>.
     */
    public final TableField<DjRfcRelationRecord, Long> TO_CI_ID = createField("to_ci_id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.action_id</code>.
     */
    public final TableField<DjRfcRelationRecord, Integer> ACTION_ID = createField("action_id", org.jooq.impl.SQLDataType.INTEGER.nullable(false), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.created_by</code>.
     */
    public final TableField<DjRfcRelationRecord, String> CREATED_BY = createField("created_by", org.jooq.impl.SQLDataType.VARCHAR(200), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.updated_by</code>.
     */
    public final TableField<DjRfcRelationRecord, String> UPDATED_BY = createField("updated_by", org.jooq.impl.SQLDataType.VARCHAR(200), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.execution_order</code>.
     */
    public final TableField<DjRfcRelationRecord, Short> EXECUTION_ORDER = createField("execution_order", org.jooq.impl.SQLDataType.SMALLINT.nullable(false), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.is_active_in_release</code>.
     */
    public final TableField<DjRfcRelationRecord, Boolean> IS_ACTIVE_IN_RELEASE = createField("is_active_in_release", org.jooq.impl.SQLDataType.BOOLEAN.nullable(false).defaultValue(org.jooq.impl.DSL.field("true", org.jooq.impl.SQLDataType.BOOLEAN)), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.last_rfc_id</code>.
     */
    public final TableField<DjRfcRelationRecord, Long> LAST_RFC_ID = createField("last_rfc_id", org.jooq.impl.SQLDataType.BIGINT, this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.comments</code>.
     */
    public final TableField<DjRfcRelationRecord, String> COMMENTS = createField("comments", org.jooq.impl.SQLDataType.VARCHAR(2000), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.created</code>.
     */
    public final TableField<DjRfcRelationRecord, Timestamp> CREATED = createField("created", org.jooq.impl.SQLDataType.TIMESTAMP.nullable(false).defaultValue(org.jooq.impl.DSL.field("now()", org.jooq.impl.SQLDataType.TIMESTAMP)), this, "");

    /**
     * The column <code>kloopzcm.dj_rfc_relation.updated</code>.
     */
    public final TableField<DjRfcRelationRecord, Timestamp> UPDATED = createField("updated", org.jooq.impl.SQLDataType.TIMESTAMP.nullable(false).defaultValue(org.jooq.impl.DSL.field("now()", org.jooq.impl.SQLDataType.TIMESTAMP)), this, "");

    /**
     * Create a <code>kloopzcm.dj_rfc_relation</code> table reference
     */
    public DjRfcRelation() {
        this(DSL.name("dj_rfc_relation"), null);
    }

    /**
     * Create an aliased <code>kloopzcm.dj_rfc_relation</code> table reference
     */
    public DjRfcRelation(String alias) {
        this(DSL.name(alias), DJ_RFC_RELATION);
    }

    /**
     * Create an aliased <code>kloopzcm.dj_rfc_relation</code> table reference
     */
    public DjRfcRelation(Name alias) {
        this(alias, DJ_RFC_RELATION);
    }

    private DjRfcRelation(Name alias, Table<DjRfcRelationRecord> aliased) {
        this(alias, aliased, null);
    }

    private DjRfcRelation(Name alias, Table<DjRfcRelationRecord> aliased, Field<?>[] parameters) {
        super(alias, null, aliased, parameters, "");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Schema getSchema() {
        return Kloopzcm.KLOOPZCM;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<Index> getIndexes() {
        return Arrays.<Index>asList(Indexes.DJ_RFC_REL_FCI_IDX, Indexes.DJ_RFC_REL_FCIRELTOCI_IDX, Indexes.DJ_RFC_REL_FRFC_IDX, Indexes.DJ_RFC_REL_NS_IDX, Indexes.DJ_RFC_REL_R_IDX, Indexes.DJ_RFC_REL_RL_IDX, Indexes.DJ_RFC_REL_TCI_IDX, Indexes.DJ_RFC_REL_TCIRLSFROMCI_IDX, Indexes.DJ_RFC_REL_TRFC_IDX, Indexes.DJ_RFC_RELATION_CRID_IDX, Indexes.DJ_RFC_RELATION_FROMIDX, Indexes.DJ_RFC_RELATION_PK, Indexes.DJ_RFC_RELATION_TOIDX);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UniqueKey<DjRfcRelationRecord> getPrimaryKey() {
        return Keys.DJ_RFC_RELATION_PK;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<UniqueKey<DjRfcRelationRecord>> getKeys() {
        return Arrays.<UniqueKey<DjRfcRelationRecord>>asList(Keys.DJ_RFC_RELATION_PK);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<ForeignKey<DjRfcRelationRecord, ?>> getReferences() {
        return Arrays.<ForeignKey<DjRfcRelationRecord, ?>>asList(Keys.DJ_RFC_RELATION__DJ_RELATION_RFC_RELID_FK, Keys.DJ_RFC_RELATION__DJ_RFC_RELATION_NS_FK, Keys.DJ_RFC_RELATION__DJ_RFC_CI_DJ_RFC_RELATION_FK, Keys.DJ_RFC_RELATION__DJ_RELATION_RFC_RID_FK, Keys.DJ_RFC_RELATION__DJ_RFC_CI_DJ_RFC_RELATION_FK1, Keys.DJ_RFC_RELATION__DJ_RELATION_RFC_ACTID_FK);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public DjRfcRelation as(String alias) {
        return new DjRfcRelation(DSL.name(alias), this);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public DjRfcRelation as(Name alias) {
        return new DjRfcRelation(alias, this);
    }

    /**
     * Rename this table
     */
    @Override
    public DjRfcRelation rename(String name) {
        return new DjRfcRelation(DSL.name(name), null);
    }

    /**
     * Rename this table
     */
    @Override
    public DjRfcRelation rename(Name name) {
        return new DjRfcRelation(name, null);
    }
}
/**
 *
 * Copyright 2004 The Apache Software Foundation
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package org.apache.geronimo.kernel.basic;

import java.lang.reflect.Method;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.HashSet;
import java.beans.Introspector;
import javax.management.ObjectName;

import net.sf.cglib.asm.Type;
import net.sf.cglib.core.Signature;
import net.sf.cglib.proxy.MethodInterceptor;
import net.sf.cglib.proxy.MethodProxy;
import net.sf.cglib.reflect.FastClass;
import org.apache.geronimo.gbean.GOperationSignature;
import org.apache.geronimo.gbean.GBeanInfo;
import org.apache.geronimo.gbean.GAttributeInfo;
import org.apache.geronimo.gbean.GOperationInfo;
import org.apache.geronimo.gbean.runtime.GBeanInstance;
import org.apache.geronimo.gbean.runtime.RawInvoker;
import org.apache.geronimo.kernel.Kernel;
import org.apache.geronimo.kernel.management.State;
import org.apache.geronimo.kernel.proxy.DeadProxyException;
import org.apache.geronimo.kernel.proxy.ProxyManager;
import org.apache.geronimo.kernel.proxy.GeronimoManagedBean;

/**
 * cglib {@link MethodInterceptor} that routes calls on a kernel proxy to the
 * GBean registered under {@link #objectName}.  At construction it builds a
 * table of {@link ProxyInvoker}s indexed by the cglib "super index" of each
 * proxy-interface method, preferring the fast {@link RawInvoker} path and
 * falling back to reflective kernel invocations when the raw invoker is not
 * available.  equals/hashCode/toString (and the GeronimoManagedBean lifecycle
 * methods) are handled locally rather than dispatched to the GBean.
 *
 * @version $Rev$ $Date$
 */
public class ProxyMethodInterceptor implements MethodInterceptor {
    /**
     * Type of the proxy interface
     */
    private final Class proxyType;

    /**
     * The object name to which we are connected.
     */
    private final ObjectName objectName;

    /**
     * GBeanInvokers keyed on the proxy interface method index
     */
    // Set to null by destroy(); intercept() treats null as a dead proxy.
    private ProxyInvoker[] gbeanInvokers;

    public ProxyMethodInterceptor(Class proxyType, Kernel kernel, ObjectName objectName) {
        assert proxyType != null;
        assert kernel != null;
        assert objectName != null;

        this.proxyType = proxyType;
        this.objectName = objectName;
        gbeanInvokers = createGBeanInvokers(kernel, objectName);
    }

    // Invalidates the proxy: subsequent intercept() calls throw DeadProxyException.
    public synchronized void destroy() {
        gbeanInvokers = null;
    }

    public ObjectName getObjectName() {
        return objectName;
    }

    /**
     * Dispatches a proxy call to the invoker registered for the method's
     * cglib super index.  The table lookup is synchronized against destroy();
     * the actual invocation deliberately happens outside the lock.
     *
     * @throws DeadProxyException if destroy() has been called
     * @throws UnsupportedOperationException if no invoker maps to the method
     */
    public final Object intercept(final Object object, final Method method, final Object[] args, final MethodProxy proxy) throws Throwable {
        ProxyInvoker gbeanInvoker;

        int interfaceIndex = proxy.getSuperIndex();
        synchronized (this) {
            if (gbeanInvokers == null) {
                throw new DeadProxyException("Proxy is no longer valid");
            }
            gbeanInvoker = gbeanInvokers[interfaceIndex];
        }

        if (gbeanInvoker == null) {
            throw new UnsupportedOperationException("No implementation method: objectName=" + objectName + ", method=" + method);
        }

        return gbeanInvoker.invoke(objectName, args);
    }

    /**
     * Builds the invoker table: tries the raw-invoker fast path first, falling
     * back to kernel-reflective invokers on ANY failure, then overlays the
     * locally-handled methods (equals/hashCode/toString and, for managed
     * beans, the lifecycle accessors).
     *
     * NOTE(review): the getMethod(name, null) calls pass a null parameter-type
     * array (pre-varargs idiom, equivalent to a no-arg lookup) — confirm the
     * target JDK if migrating this code.
     */
    private ProxyInvoker[] createGBeanInvokers(Kernel kernel, ObjectName objectName) {
        ProxyInvoker[] invokers;
        try {
            RawInvoker rawInvoker = (RawInvoker) kernel.getAttribute(objectName, GBeanInstance.RAW_INVOKER);
            invokers = createRawGBeanInvokers(rawInvoker, proxyType);
        } catch (Exception e) {
            // Raw invoker unavailable for this GBean — fall back to kernel reflection.
            invokers = createKernelGBeanInvokers(kernel, objectName, proxyType);
        }

        // handle equals, hashCode and toString directly here
        try {
            invokers[getSuperIndex(proxyType, proxyType.getMethod("equals", new Class[]{Object.class}))] = new EqualsInvoke(kernel.getProxyManager());
            invokers[getSuperIndex(proxyType, proxyType.getMethod("hashCode", null))] = new HashCodeInvoke();
            invokers[getSuperIndex(proxyType, proxyType.getMethod("toString", null))] = new ToStringInvoke(proxyType.getName());
            if(GeronimoManagedBean.class.isAssignableFrom(proxyType)) {
                invokers[getSuperIndex(proxyType, proxyType.getMethod("getState", null))] = new GetStateInvoke(kernel);
                invokers[getSuperIndex(proxyType, proxyType.getMethod("getStateInstance", null))] = new GetStateInstanceInvoke(kernel);
                invokers[getSuperIndex(proxyType, proxyType.getMethod("start", null))] = new StartInvoke(kernel);
                invokers[getSuperIndex(proxyType, proxyType.getMethod("startRecursive", null))] = new StartRecursiveInvoke(kernel);
                invokers[getSuperIndex(proxyType, proxyType.getMethod("stop", null))] = new StopInvoke(kernel);
                invokers[getSuperIndex(proxyType, proxyType.getMethod("getStartTime", null))] = new GetStartTimeInvoke(kernel);
                invokers[getSuperIndex(proxyType, proxyType.getMethod("getObjectName", null))] = new GetObjectNameInvoke();
            }
        } catch (Exception e) {
            // this can not happen... all classes must implement equals, hashCode and toString
            throw new AssertionError(e);
        }

        return invokers;
    }

    /**
     * Builds invokers that call through the RawInvoker's operation/attribute
     * index tables. The result array is sized by cglib's FastClass max index
     * so super indexes can be used directly as array positions.
     */
    private ProxyInvoker[] createRawGBeanInvokers(RawInvoker rawInvoker, Class proxyType) {
        Map operations = rawInvoker.getOperationIndex();
        Map attributes = rawInvoker.getAttributeIndex();

        // build the method lookup table
        FastClass fastClass = FastClass.create(proxyType);
        ProxyInvoker[] invokers = new ProxyInvoker[fastClass.getMaxIndex() + 1];
        Method[] methods = proxyType.getMethods();
        for (int i = 0; i < methods.length; i++) {
            Method method = methods[i];
            int interfaceIndex = getSuperIndex(proxyType, method);
            if (interfaceIndex >= 0) {
                invokers[interfaceIndex] = createRawGBeanInvoker(rawInvoker, method, operations, attributes);
            }
        }

        return invokers;
    }

    /**
     * Maps a single proxy method to a raw invoker: an exact operation-signature
     * match wins; otherwise JavaBean-style get/is/set prefixes are resolved to
     * attributes, first by the raw (capitalized) name and then by a
     * case-insensitive scan via getMethodIndex. Returns null for methods with
     * no mapping (intercept() reports these as unsupported).
     */
    private ProxyInvoker createRawGBeanInvoker(RawInvoker rawInvoker, Method method, Map operations, Map attributes) {
        if (operations.containsKey(new GOperationSignature(method))) {
            int methodIndex = ((Integer) operations.get(new GOperationSignature(method))).intValue();
            return new RawOperationInvoker(rawInvoker, methodIndex);
        }

        if (method.getName().startsWith("get")) {
            String attributeName = method.getName().substring(3);
            Integer methodIndex = ((Integer) attributes.get(attributeName));
            if (methodIndex != null) {
                return new RawGetAttributeInvoker(rawInvoker, methodIndex.intValue());
            }
            methodIndex = getMethodIndex(attributes, attributeName);
            if (methodIndex != null) {
                return new RawGetAttributeInvoker(rawInvoker, methodIndex.intValue());
            }
        }

        if (method.getName().startsWith("is")) {
            String attributeName = method.getName().substring(2);
            Integer methodIndex = ((Integer) attributes.get(attributeName));
            if (methodIndex != null) {
                return new RawGetAttributeInvoker(rawInvoker, methodIndex.intValue());
            }
            methodIndex = getMethodIndex(attributes, attributeName);
            if (methodIndex != null) {
                return new RawGetAttributeInvoker(rawInvoker, methodIndex.intValue());
            }
        }

        if (method.getName().startsWith("set")) {
            String attributeName = method.getName().substring(3);
            Integer methodIndex = ((Integer) attributes.get(attributeName));
            if (methodIndex != null) {
                return new RawSetAttributeInvoker(rawInvoker, methodIndex.intValue());
            }
            methodIndex = getMethodIndex(attributes, attributeName);
            if (methodIndex != null) {
                return new RawSetAttributeInvoker(rawInvoker, methodIndex.intValue());
            }
        }
        return null;
    }

    /**
     * Fallback path: builds invokers that go through the kernel's reflective
     * getAttribute/setAttribute/invoke APIs, using the GBeanInfo metadata to
     * decide which proxy methods correspond to attributes vs operations.
     */
    private ProxyInvoker[] createKernelGBeanInvokers(Kernel kernel, ObjectName objectName, Class proxyType) {
        GBeanInfo info;
        try {
            info = kernel.getGBeanInfo(objectName);
        } catch (Exception e) {
            // NOTE(review): original cause is dropped here; acceptable for this
            // legacy code path but the message-only rethrow loses the stack.
            throw new IllegalArgumentException("Could not get GBeanInfo for target object: " + objectName);
        }

        // build attributeName->attributeInfo map
        Set attributeInfos = info.getAttributes();
        Set attributeNames = new HashSet(attributeInfos.size());
        for (Iterator iterator = attributeInfos.iterator(); iterator.hasNext();) {
            GAttributeInfo attributeInfo = (GAttributeInfo) iterator.next();
            attributeNames.add(attributeInfo.getName());
        }

        // build operationSignature->operationInfo map
        Set operationInfos = info.getOperations();
        Set operationSignatures = new HashSet(operationInfos.size());
        for (Iterator iterator = operationInfos.iterator(); iterator.hasNext();) {
            GOperationInfo operationInfo = (GOperationInfo) iterator.next();
            operationSignatures.add(new GOperationSignature(operationInfo.getName(), operationInfo.getParameterList()));
        }

        // build the method lookup table
        FastClass fastClass = FastClass.create(proxyType);
        ProxyInvoker[] invokers = new ProxyInvoker[fastClass.getMaxIndex() + 1];
        Method[] methods = proxyType.getMethods();
        for (int i = 0; i < methods.length; i++) {
            Method method = methods[i];
            int interfaceIndex = getSuperIndex(proxyType, method);
            if (interfaceIndex >= 0) {
                invokers[interfaceIndex] = createJMXGBeanInvoker(kernel, method, operationSignatures, attributeNames);
            }
        }

        return invokers;
    }

    /**
     * Kernel-path analogue of createRawGBeanInvoker: exact operation signature
     * first, then get/is/set prefixes matched against attribute names, trying
     * the capitalized suffix and then its Introspector-decapitalized form.
     */
    private ProxyInvoker createJMXGBeanInvoker(Kernel kernel, Method method, Set operationSignatures, Set attributeNames) {
        if (operationSignatures.contains(new GOperationSignature(method))) {
            return new KernelOperationInvoker(kernel, method);
        }

        String name = method.getName();
        if (name.startsWith("get")) {
            String attributeName = method.getName().substring(3);
            if (attributeNames.contains(attributeName)) {
                return new KernelGetAttributeInvoker(kernel, attributeName);
            }
            attributeName = Introspector.decapitalize(attributeName);
            if (attributeNames.contains(attributeName)) {
                return new KernelGetAttributeInvoker(kernel, attributeName);
            }
        } else if (name.startsWith("is")) {
            String attrName = method.getName().substring(2);
            if (attributeNames.contains(attrName)) {
                return new KernelGetAttributeInvoker(kernel, attrName);
            }
            attrName = Introspector.decapitalize(attrName);
            if (attributeNames.contains(attrName)) {
                return new KernelGetAttributeInvoker(kernel, attrName);
            }
        } else if (name.startsWith("set")) {
            String attrName = method.getName().substring(3);
            if (attributeNames.contains(attrName)) {
                return new KernelSetAttributeInvoker(kernel, attrName);
            }
            attrName = Introspector.decapitalize(attrName);
            if (attributeNames.contains(attrName)) {
                return new KernelSetAttributeInvoker(kernel, attrName);
            }
        }
        return null;
    }

    /**
     * Resolves a method to its cglib super index on the proxy type, or -1 when
     * cglib has no MethodProxy for it.
     */
    private static int getSuperIndex(Class proxyType, Method method) {
        Signature signature = new Signature(method.getName(), Type.getReturnType(method), Type.getArgumentTypes(method));
        MethodProxy methodProxy = MethodProxy.find(proxyType, signature);
        if (methodProxy != null) {
            return methodProxy.getSuperIndex();
        }
        return -1;
    }

    /**
     * Case-insensitive lookup of an attribute index by name; returns null when
     * no key matches.
     */
    private static Integer getMethodIndex(Map attributes, String attributeName) {
        Iterator iter = attributes.keySet().iterator();
        while (iter.hasNext()) {
            String key = (String) iter.next();
            if (key.equalsIgnoreCase(attributeName)) {
                return (Integer) attributes.get(key);
            }
        }
        return null;
    }

    // Locally-handled invokers below: these never touch the target GBean's own
    // implementation; they answer from the proxy's identity or the kernel.

    static final class HashCodeInvoke implements ProxyInvoker {
        public Object invoke(ObjectName objectName, Object[] arguments) throws Throwable {
            return new Integer(objectName.hashCode());
        }
    }

    static final class EqualsInvoke implements ProxyInvoker {
        private final ProxyManager proxyManager;

        public EqualsInvoke(ProxyManager proxyManager) {
            this.proxyManager = proxyManager;
        }

        public Object invoke(ObjectName objectName, Object[] arguments) throws Throwable {
            // Two proxies are equal when they resolve to the same target ObjectName.
            ObjectName proxyTarget = proxyManager.getProxyTarget(arguments[0]);
            return Boolean.valueOf(objectName.equals(proxyTarget));
        }
    }

    static final class ToStringInvoke implements ProxyInvoker {
        private final String interfaceName;

        public ToStringInvoke(String interfaceName) {
            this.interfaceName = "[" + interfaceName + ": ";
        }

        public Object invoke(ObjectName objectName, Object[] arguments) throws Throwable {
            return interfaceName + objectName + "]";
        }
    }

    static final class GetStateInvoke implements ProxyInvoker {
        private Kernel kernel;

        public GetStateInvoke(Kernel kernel) {
            this.kernel = kernel;
        }

        public Object invoke(ObjectName objectName, Object[] arguments) throws Throwable {
            return new Integer(kernel.getGBeanState(objectName));
        }
    }

    static final class GetStateInstanceInvoke implements ProxyInvoker {
        private Kernel kernel;

        public GetStateInstanceInvoke(Kernel kernel) {
            this.kernel = kernel;
        }

        public Object invoke(ObjectName objectName, Object[] arguments) throws Throwable {
            return State.fromInt(kernel.getGBeanState(objectName));
        }
    }

    static final class StartInvoke implements ProxyInvoker {
        private Kernel kernel;

        public StartInvoke(Kernel kernel) {
            this.kernel = kernel;
        }

        public Object invoke(ObjectName objectName, Object[] arguments) throws Throwable {
            kernel.startGBean(objectName);
            return null;
        }
    }

    static final class StartRecursiveInvoke implements ProxyInvoker {
        private Kernel kernel;

        public StartRecursiveInvoke(Kernel kernel) {
            this.kernel = kernel;
        }

        public Object invoke(ObjectName objectName, Object[] arguments) throws Throwable {
            kernel.startRecursiveGBean(objectName);
            return null;
        }
    }

    static final class GetStartTimeInvoke implements ProxyInvoker {
        private Kernel kernel;

        public GetStartTimeInvoke(Kernel kernel) {
            this.kernel = kernel;
        }

        public Object invoke(ObjectName objectName, Object[] arguments) throws Throwable {
            return new Long(kernel.getGBeanStartTime(objectName));
        }
    }

    static final class StopInvoke implements ProxyInvoker {
        private Kernel kernel;

        public StopInvoke(Kernel kernel) {
            this.kernel = kernel;
        }

        public Object invoke(ObjectName objectName, Object[] arguments) throws Throwable {
            kernel.stopGBean(objectName);
            return null;
        }
    }

    static final class GetObjectNameInvoke implements ProxyInvoker {
        public Object invoke(ObjectName objectName, Object[] arguments) throws Throwable {
            return objectName.getCanonicalName();
        }
    }
}
package Client; import network.Database; import javax.swing.*; import javax.xml.bind.annotation.adapters.HexBinaryAdapter; import java.awt.*; import java.awt.event.*; import java.io.IOException; import java.security.MessageDigest; /** * Created by Bridget on 4/9/2016. */ public class GUILogin extends JPanel{ public JFrame loginscreen; private JButton loginButton = new JButton("Login"); private JButton registerButton = new JButton("Register"); private JTextField userText = new JTextField("Enter Username"); private JPasswordField passwordText = new JPasswordField(); private FocusListener UNFocus = new FocusAdapter() { public void focusGained(java.awt.event.FocusEvent evt) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { userText.selectAll(); } }); } }; private FocusListener passFocus = new FocusAdapter() { public void focusGained(java.awt.event.FocusEvent evt) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { passwordText.selectAll(); } }); } }; //Constructor public void createAndShowLogin(){ buildLogin(); } private void buildLogin() { loginscreen = new JFrame("Welcome to SET!"); loginscreen.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); loginscreen.setLayout(null); loginscreen.getContentPane().setBackground(new Color(51, 255, 255)); loginscreen.setPreferredSize(new Dimension(300, 150)); //loginscreen.setResizable(false); JLabel userLabel = new JLabel("Username"); userLabel.setBounds(10, 10, 80, 25); loginscreen.add(userLabel); userText.setBounds(100, 10, 160, 25); JLabel passwordLabel = new JLabel("Password"); passwordLabel.setBounds(10, 40, 80, 25); loginscreen.add(passwordLabel); passwordText.setBounds(100, 40, 160, 25); loginButton.setBounds(10, 80, 80, 25); loginscreen.getRootPane().setDefaultButton(loginButton); registerButton.setBounds(100, 80, 100, 25); addLoginAndRegisterListeners(); loginscreen.add(loginButton); loginscreen.add(registerButton); userText.addFocusListener(UNFocus); 
passwordText.addFocusListener(passFocus); loginscreen.add(userText); loginscreen.add(passwordText); loginscreen.addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent e) { //TODO: Add proper exit behavior //ClientInit.inStream.println("END_CONN"); try { ClientInit.sck.close(); } catch (IOException e1) { e1.printStackTrace(); } super.windowClosing(e); } }); loginscreen.setVisible(true); loginscreen.pack(); } public String getUN(){ String regex = "^[a-zA-Z]+$"; if (userText.getText().matches(regex)) return userText.getText(); else return null; } public String getPass(){ /*try{ MessageDigest md = MessageDigest.getInstance("SHA"); return new String((new HexBinaryAdapter()).marshal(md.digest(new String(passwordText.getPassword()).getBytes()))); } catch (Exception e) { return null; } */ if (passwordText.getText().isEmpty()) return null; else return passwordText.getText(); } public void addLoginAndRegisterListeners() { loginButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String username = getUN(); String password = getPass(); if(username == null || password == null) JOptionPane.showMessageDialog(null, "Please enter a valid username and password. Only Alphanumeric characters are allowed."); else ClientInit.inStream.println("LOGIN," + username + "," + Database.hash(password)); //processResponse(e); } }); registerButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { String username = getUN(); String password = getPass(); if(username == null || password == null) JOptionPane.showMessageDialog(null, "Please enter a valid username and password. 
Only Alphanumeric characters are allowed."); else ClientInit.inStream.println("REGISTER," + username + "," + Database.hash(password)); //processResponse(e); } }); } public void detachListeners() { for(ActionListener l : loginButton.getActionListeners()) loginButton.removeActionListener(l); for(ActionListener l : registerButton.getActionListeners()) registerButton.removeActionListener(l); userText.removeFocusListener(UNFocus); passwordText.removeFocusListener(passFocus); } public String processResponse(String msg){ String response = null; try { String[] tokens = msg.split("\\s*,\\s*"); System.out.println(response ); if(tokens[0] == "BAD_VALUE"){ //TODO } else if(tokens[0].equals("ACK_REGISTER")) { if (tokens[1].equals("SUCCESS")) { response = "Successfully Registered. Please log in"; JOptionPane.showMessageDialog(null, response); } else if (tokens[1].equals("FAILURE")){ response = "Unable to register. Please try again"; JOptionPane.showMessageDialog(null, response); } else if (tokens[1].equals("EXISTS")) { response = "Unable to register. Username already exists"; JOptionPane.showMessageDialog(null, response); } } else if(tokens[0].equals("ACK_LOGIN")){ if(tokens[1].equals("FAILURE")) { response = "Unable to login. Please try again."; JOptionPane.showMessageDialog(null, response); } if(tokens[1].equals("SUCCESS")){ if (tokens[2].equals("LOBBY")){ ClientInit.game.myUN = userText.getText(); ClientInit.STATE = ClientInit.LOBBY; ClientInit.switchStates(ClientInit.LOGIN,ClientInit.LOBBY); } } } } catch (Exception ex) { ex.printStackTrace(); } return response; } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.cluster.routing;

import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;

/**
 * A {@link RoutingNode} represents a cluster node associated with a single {@link DiscoveryNode} including all shards
 * that are hosted on that nodes. Each {@link RoutingNode} has a unique node id that can be used to identify the node.
 */
public class RoutingNode implements Iterable<ShardRouting> {

    private final String nodeId;

    @Nullable
    private final DiscoveryNode node;

    private final LinkedHashMap<ShardId, ShardRouting> shards; // LinkedHashMap to preserve order

    // Derived views of 'shards', kept in sync by add/update/remove and checked by invariant():
    // subsets of shards in the INITIALIZING / RELOCATING state, and a per-index grouping.
    private final LinkedHashSet<ShardRouting> initializingShards;

    private final LinkedHashSet<ShardRouting> relocatingShards;

    private final HashMap<Index, LinkedHashSet<ShardRouting>> shardsByIndex;

    public RoutingNode(String nodeId, DiscoveryNode node, ShardRouting... shards) {
        this(nodeId, node, buildShardRoutingMap(shards));
    }

    RoutingNode(String nodeId, @Nullable DiscoveryNode node, LinkedHashMap<ShardId, ShardRouting> shards) {
        this.nodeId = nodeId;
        this.node = node;
        this.shards = shards;
        this.relocatingShards = new LinkedHashSet<>();
        this.initializingShards = new LinkedHashSet<>();
        this.shardsByIndex = new LinkedHashMap<>();
        // Populate the derived views from the authoritative shard map.
        for (ShardRouting shardRouting : shards.values()) {
            if (shardRouting.initializing()) {
                initializingShards.add(shardRouting);
            } else if (shardRouting.relocating()) {
                relocatingShards.add(shardRouting);
            }
            shardsByIndex.computeIfAbsent(shardRouting.index(), k -> new LinkedHashSet<>()).add(shardRouting);
        }
        assert invariant();
    }

    // Deep-copy constructor: the per-index sets must be copied individually so the
    // copy's sets can be mutated independently of the original's.
    private RoutingNode(RoutingNode original) {
        this.nodeId = original.nodeId;
        this.node = original.node;
        this.shards = new LinkedHashMap<>(original.shards);
        this.relocatingShards = new LinkedHashSet<>(original.relocatingShards);
        this.initializingShards = new LinkedHashSet<>(original.initializingShards);
        this.shardsByIndex = new LinkedHashMap<>(original.shardsByIndex.size());
        for (Map.Entry<Index, LinkedHashSet<ShardRouting>> entry : original.shardsByIndex.entrySet()) {
            shardsByIndex.put(entry.getKey(), new LinkedHashSet<>(entry.getValue()));
        }
        assert invariant();
    }

    RoutingNode copy() {
        return new RoutingNode(this);
    }

    // Builds the shard map from varargs, rejecting duplicate shard ids on the same node.
    private static LinkedHashMap<ShardId, ShardRouting> buildShardRoutingMap(ShardRouting... shardRoutings) {
        final LinkedHashMap<ShardId, ShardRouting> shards = new LinkedHashMap<>();
        for (ShardRouting shardRouting : shardRoutings) {
            ShardRouting previousValue = shards.put(shardRouting.shardId(), shardRouting);
            if (previousValue != null) {
                throw new IllegalArgumentException(
                    "Cannot have two different shards with same shard id " + shardRouting.shardId() + " on same node "
                );
            }
        }
        return shards;
    }

    @Override
    public Iterator<ShardRouting> iterator() {
        // Unmodifiable view so callers cannot desynchronize the derived sets.
        return Collections.unmodifiableCollection(shards.values()).iterator();
    }

    /**
     * Returns the nodes {@link DiscoveryNode}.
     *
     * @return discoveryNode of this node
     */
    @Nullable
    public DiscoveryNode node() {
        return this.node;
    }

    /** Returns the shard with the given id hosted on this node, or {@code null} if absent. */
    @Nullable
    public ShardRouting getByShardId(ShardId id) {
        return shards.get(id);
    }

    /**
     * Get the id of this node
     * @return id of the node
     */
    public String nodeId() {
        return this.nodeId;
    }

    /** Number of shards hosted on this node. */
    public int size() {
        return shards.size();
    }

    /**
     * Add a new shard to this node
     * @param shard Shard to create on this Node
     */
    void add(ShardRouting shard) {
        assert invariant();
        if (shards.containsKey(shard.shardId())) {
            throw new IllegalStateException(
                "Trying to add a shard "
                    + shard.shardId()
                    + " to a node ["
                    + nodeId
                    + "] where it already exists. current ["
                    + shards.get(shard.shardId())
                    + "]. new ["
                    + shard
                    + "]"
            );
        }
        shards.put(shard.shardId(), shard);

        if (shard.initializing()) {
            initializingShards.add(shard);
        } else if (shard.relocating()) {
            relocatingShards.add(shard);
        }
        shardsByIndex.computeIfAbsent(shard.index(), k -> new LinkedHashSet<>()).add(shard);
        assert invariant();
    }

    // Replaces oldShard with newShard, moving it between the state sets and the
    // per-index grouping as needed. No-op if the shard was already removed.
    void update(ShardRouting oldShard, ShardRouting newShard) {
        assert invariant();
        if (shards.containsKey(oldShard.shardId()) == false) {
            // Shard was already removed by routing nodes iterator
            // TODO: change caller logic in RoutingNodes so that this check can go away
            return;
        }
        ShardRouting previousValue = shards.put(newShard.shardId(), newShard);
        assert previousValue == oldShard : "expected shard " + previousValue + " but was " + oldShard;

        if (oldShard.initializing()) {
            boolean exist = initializingShards.remove(oldShard);
            assert exist : "expected shard " + oldShard + " to exist in initializingShards";
        } else if (oldShard.relocating()) {
            boolean exist = relocatingShards.remove(oldShard);
            assert exist : "expected shard " + oldShard + " to exist in relocatingShards";
        }
        shardsByIndex.get(oldShard.index()).remove(oldShard);
        if (shardsByIndex.get(oldShard.index()).isEmpty()) {
            shardsByIndex.remove(oldShard.index());
        }
        if (newShard.initializing()) {
            initializingShards.add(newShard);
        } else if (newShard.relocating()) {
            relocatingShards.add(newShard);
        }
        shardsByIndex.computeIfAbsent(newShard.index(), k -> new LinkedHashSet<>()).add(newShard);
        assert invariant();
    }

    // Removes the shard from the map and from every derived view; empty per-index
    // sets are pruned so shardsByIndex never holds empty entries (see invariant()).
    void remove(ShardRouting shard) {
        assert invariant();
        ShardRouting previousValue = shards.remove(shard.shardId());
        assert previousValue == shard : "expected shard " + previousValue + " but was " + shard;
        if (shard.initializing()) {
            boolean exist = initializingShards.remove(shard);
            assert exist : "expected shard " + shard + " to exist in initializingShards";
        } else if (shard.relocating()) {
            boolean exist = relocatingShards.remove(shard);
            assert exist : "expected shard " + shard + " to exist in relocatingShards";
        }
        shardsByIndex.get(shard.index()).remove(shard);
        if (shardsByIndex.get(shard.index()).isEmpty()) {
            shardsByIndex.remove(shard.index());
        }
        assert invariant();
    }

    /**
     * Determine the number of shards with a specific state
     * @param states set of states which should be counted
     * @return number of shards
     */
    public int numberOfShardsWithState(ShardRoutingState... states) {
        // Fast path: single INITIALIZING/RELOCATING queries are answered from the
        // precomputed sets without iterating all shards.
        if (states.length == 1) {
            if (states[0] == ShardRoutingState.INITIALIZING) {
                return initializingShards.size();
            } else if (states[0] == ShardRoutingState.RELOCATING) {
                return relocatingShards.size();
            }
        }

        int count = 0;
        for (ShardRouting shardEntry : this) {
            for (ShardRoutingState state : states) {
                if (shardEntry.state() == state) {
                    count++;
                }
            }
        }
        return count;
    }

    /**
     * Determine the shards with a specific state
     * @param states set of states which should be listed
     * @return List of shards
     */
    public List<ShardRouting> shardsWithState(ShardRoutingState... states) {
        // Fast path mirrors numberOfShardsWithState: copy the precomputed set.
        if (states.length == 1) {
            if (states[0] == ShardRoutingState.INITIALIZING) {
                return new ArrayList<>(initializingShards);
            } else if (states[0] == ShardRoutingState.RELOCATING) {
                return new ArrayList<>(relocatingShards);
            }
        }
        List<ShardRouting> shards = new ArrayList<>();
        for (ShardRouting shardEntry : this) {
            for (ShardRoutingState state : states) {
                if (shardEntry.state() == state) {
                    shards.add(shardEntry);
                }
            }
        }
        return shards;
    }

    /**
     * Determine the shards of an index with a specific state
     * @param index id of the index
     * @param states set of states which should be listed
     * @return a list of shards
     */
    public List<ShardRouting> shardsWithState(String index, ShardRoutingState... states) {
        List<ShardRouting> shards = new ArrayList<>();

        // Fast path: filter the precomputed state sets by index name.
        if (states.length == 1) {
            if (states[0] == ShardRoutingState.INITIALIZING) {
                for (ShardRouting shardEntry : initializingShards) {
                    if (shardEntry.getIndexName().equals(index) == false) {
                        continue;
                    }
                    shards.add(shardEntry);
                }
                return shards;
            } else if (states[0] == ShardRoutingState.RELOCATING) {
                for (ShardRouting shardEntry : relocatingShards) {
                    if (shardEntry.getIndexName().equals(index) == false) {
                        continue;
                    }
                    shards.add(shardEntry);
                }
                return shards;
            }
        }

        for (ShardRouting shardEntry : this) {
            if (shardEntry.getIndexName().equals(index) == false) {
                continue;
            }
            for (ShardRoutingState state : states) {
                if (shardEntry.state() == state) {
                    shards.add(shardEntry);
                }
            }
        }
        return shards;
    }

    /**
     * The number of shards on this node that will not be eventually relocated.
     */
    public int numberOfOwningShards() {
        return shards.size() - relocatingShards.size();
    }

    /** Like {@link #numberOfOwningShards()} but restricted to one index; 0 if the index has no shards here. */
    public int numberOfOwningShardsForIndex(final Index index) {
        final LinkedHashSet<ShardRouting> shardRoutings = shardsByIndex.get(index);
        if (shardRoutings == null) {
            return 0;
        } else {
            return Math.toIntExact(shardRoutings.stream().filter(Predicate.not(ShardRouting::relocating)).count());
        }
    }

    /** Multi-line debug rendering of this node and each hosted shard's summary. */
    public String prettyPrint() {
        StringBuilder sb = new StringBuilder();
        sb.append("-----node_id[").append(nodeId).append("][").append(node == null ? "X" : "V").append("]\n");
        for (ShardRouting entry : shards.values()) {
            sb.append("--------").append(entry.shortSummary()).append('\n');
        }
        return sb.toString();
    }

    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("routingNode ([");
        if (node != null) {
            sb.append(node.getName());
            sb.append("][");
            sb.append(node.getId());
            sb.append("][");
            sb.append(node.getHostName());
            sb.append("][");
            sb.append(node.getHostAddress());
        } else {
            sb.append("null");
        }
        sb.append("], [");
        sb.append(shards.size());
        sb.append(" assigned shards])");
        return sb.toString();
    }

    /** Snapshot copy of the hosted shards; safe for the caller to mutate. */
    public List<ShardRouting> copyShards() {
        return new ArrayList<>(shards.values());
    }

    public boolean isEmpty() {
        return shards.isEmpty();
    }

    // Assertion-only consistency check: the three derived views must exactly
    // reflect the authoritative 'shards' map. Always returns true so it can be
    // used inside 'assert' and compiled away when assertions are disabled.
    private boolean invariant() {
        // initializingShards must consistent with that in shards
        Collection<ShardRouting> shardRoutingsInitializing = shards.values()
            .stream()
            .filter(ShardRouting::initializing)
            .collect(Collectors.toList());
        assert initializingShards.size() == shardRoutingsInitializing.size();
        assert initializingShards.containsAll(shardRoutingsInitializing);

        // relocatingShards must consistent with that in shards
        Collection<ShardRouting> shardRoutingsRelocating = shards.values()
            .stream()
            .filter(ShardRouting::relocating)
            .collect(Collectors.toList());
        assert relocatingShards.size() == shardRoutingsRelocating.size();
        assert relocatingShards.containsAll(shardRoutingsRelocating);

        final Map<Index, Set<ShardRouting>> shardRoutingsByIndex = shards.values()
            .stream()
            .collect(Collectors.groupingBy(ShardRouting::index, Collectors.toSet()));
        assert shardRoutingsByIndex.equals(shardsByIndex);

        return true;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        RoutingNode that = (RoutingNode) o;
        return nodeId.equals(that.nodeId) && Objects.equals(node, that.node) && shards.equals(that.shards);
    }

    @Override
    public int hashCode() {
        return Objects.hash(nodeId, node, shards);
    }
}
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.services.ejb.timer; import java.io.Serializable; import java.time.ZonedDateTime; import java.time.temporal.ChronoUnit; import java.util.Date; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import javax.annotation.PostConstruct; import javax.annotation.Resource; import javax.ejb.ConcurrencyManagement; import javax.ejb.ConcurrencyManagementType; import javax.ejb.Lock; import javax.ejb.LockType; import javax.ejb.NoSuchObjectLocalException; import javax.ejb.Singleton; import javax.ejb.Startup; import javax.ejb.Timeout; import javax.ejb.Timer; import javax.ejb.TimerConfig; import javax.ejb.TransactionManagement; import javax.ejb.TransactionManagementType; import javax.transaction.Status; import javax.transaction.UserTransaction; import org.drools.core.time.JobHandle; import org.drools.core.time.impl.TimerJobInstance; import org.jbpm.process.core.timer.TimerServiceRegistry; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Singleton @Startup @ConcurrencyManagement(ConcurrencyManagementType.CONTAINER) @TransactionManagement(TransactionManagementType.BEAN) @Lock(LockType.READ) public class EJBTimerScheduler { private static final Logger logger = LoggerFactory.getLogger(EJBTimerScheduler.class); private enum TimerExceptionPolicy { RETRY, PLATFORM }; private static final Long 
TIMER_RETRY_INTERVAL = Long.parseLong(System.getProperty("org.kie.jbpm.timer.retry.interval", "5000")); private static final Integer TIMER_RETRY_LIMIT = Integer.parseInt(System.getProperty("org.kie.jbpm.timer.retry.limit", "3")); private static final TimerExceptionPolicy TIMER_RETRY_POLICY = Enum.valueOf(TimerExceptionPolicy.class, System.getProperty("org.kie.jbpm.timer.retry.policy", "PLATFORM")); private static final Integer OVERDUE_WAIT_TIME = Integer.parseInt(System.getProperty("org.jbpm.overdue.timer.wait", "20000")); private static final boolean USE_LOCAL_CACHE = Boolean.parseBoolean(System.getProperty("org.jbpm.ejb.timer.local.cache", "true")); private ConcurrentMap<String, TimerJobInstance> localCache = new ConcurrentHashMap<String, TimerJobInstance>(); @Resource protected javax.ejb.TimerService timerService; @Resource protected UserTransaction utx; @PostConstruct public void setup() { // disable auto init of timers since ejb timer service supports persistence of timers System.setProperty("org.jbpm.rm.init.timer", "false"); logger.info("Using local cache for EJB timers: {}", USE_LOCAL_CACHE); } @Timeout public void executeTimerJob(Timer timer) { EjbTimerJob timerJob = (EjbTimerJob) timer.getInfo(); TimerJobInstance timerJobInstance = timerJob.getTimerJobInstance(); logger.debug("About to execute timer for job {}", timerJob); String timerServiceId = ((EjbGlobalJobHandle) timerJobInstance.getJobHandle()).getDeploymentId(); // handle overdue timers as ejb timer service might start before all deployments are ready long time = 0; while (TimerServiceRegistry.getInstance().get(timerServiceId) == null) { logger.debug("waiting for timer service to be available, elapsed time {} ms", time); try { Thread.sleep(500); } catch (InterruptedException e) { e.printStackTrace(); } time += 500; if (time > OVERDUE_WAIT_TIME) { logger.debug("No timer service found after waiting {} ms", time); break; } } try { transaction(this::executeTimerJobInstance, timerJobInstance); } catch 
(Exception e) { recoverTimerJobInstance(timerJob, e); } } private void executeTimerJobInstance(TimerJobInstance timerJobInstance) throws Exception { try { ((Callable<?>) timerJobInstance).call(); } catch (Exception e) { logger.warn("Execution of time failed due to {}", e.getMessage(), e); throw e; } } private void recoverTimerJobInstance(EjbTimerJob ejbTimerJob, Exception e) { // if we have next date fired means that it would have been reescheduled already by DefaultTimerJobInstance if (ejbTimerJob.getTimerJobInstance().getTrigger().hasNextFireTime() != null) { logger.warn("Execution of time failed Interval Trigger failed {}", ejbTimerJob.getTimerJobInstance()); return; } // if there is not next date to be fired, we need to apply policy otherwise will be lost switch (TIMER_RETRY_POLICY) { case RETRY: logger.warn("Execution of time failed. The timer will be retried {}", ejbTimerJob.getTimerJobInstance()); Transaction<TimerJobInstance> operation = (instance) -> { ZonedDateTime nextRetry = ZonedDateTime.now().plus(TIMER_RETRY_INTERVAL, ChronoUnit.MILLIS); EjbTimerJobRetry info = null; if(ejbTimerJob instanceof EjbTimerJobRetry) { info = ((EjbTimerJobRetry) ejbTimerJob).next(); } else { info = new EjbTimerJobRetry(instance); } if (TIMER_RETRY_LIMIT > 0 && info.getRetry() > TIMER_RETRY_LIMIT) { logger.warn("The timer {} reached retry limit {}. It won't be retried again", instance, TIMER_RETRY_LIMIT); return; } TimerConfig config = new TimerConfig(info, true); timerService.createSingleActionTimer(Date.from(nextRetry.toInstant()), config); }; try { transaction(operation, ejbTimerJob.getTimerJobInstance()); } catch (Exception e1) { logger.error("Failed to executed timer recovery {}", e1.getMessage(), e1); } break; case PLATFORM: logger.warn("Execution of time failed. 
Application server policy applied {}", ejbTimerJob.getTimerJobInstance()); throw new RuntimeException(e); } } @FunctionalInterface private interface Transaction<I> { void doWork(I item) throws Exception; } private <I> void transaction(Transaction<I> operation, I item) throws Exception { try { utx.begin(); operation.doWork(item); utx.commit(); } catch(Exception e) { try { if (utx.getStatus() != Status.STATUS_NO_TRANSACTION) { utx.rollback(); } } catch (Exception re) { logger.error("transaction could not be rolled back", re); } throw e; } } public void internalSchedule(TimerJobInstance timerJobInstance) { TimerConfig config = new TimerConfig(new EjbTimerJob(timerJobInstance), true); Date expirationTime = timerJobInstance.getTrigger().hasNextFireTime(); logger.debug("Timer expiration date is {}", expirationTime); if (expirationTime != null) { timerService.createSingleActionTimer(expirationTime, config); logger.debug("Timer scheduled {} on {} scheduler service", timerJobInstance); if (USE_LOCAL_CACHE) { localCache.putIfAbsent(((EjbGlobalJobHandle) timerJobInstance.getJobHandle()).getUuid(), timerJobInstance); } } else { logger.info("Timer that was to be scheduled has already expired"); } } public boolean removeJob(JobHandle jobHandle) { EjbGlobalJobHandle ejbHandle = (EjbGlobalJobHandle) jobHandle; for (Timer timer : timerService.getTimers()) { try { Serializable info = timer.getInfo(); if (info instanceof EjbTimerJob) { EjbTimerJob job = (EjbTimerJob) info; EjbGlobalJobHandle handle = (EjbGlobalJobHandle) job.getTimerJobInstance().getJobHandle(); if (handle.getUuid().equals(ejbHandle.getUuid())) { logger.debug("Job handle {} does match timer and is going to be canceled", jobHandle); if (USE_LOCAL_CACHE) { localCache.remove(handle.getUuid()); } try { timer.cancel(); } catch (Throwable e) { logger.debug("Timer cancel error due to {}", e.getMessage()); return false; } return true; } } } catch (NoSuchObjectLocalException e) { logger.debug("Timer {} has already expired or 
was canceled ", timer); } } logger.debug("Job handle {} does not match any timer on {} scheduler service", jobHandle, this); return false; } public TimerJobInstance getTimerByName(String jobName) { if (USE_LOCAL_CACHE) { if (localCache.containsKey(jobName)) { logger.debug("Found job {} in cache returning", jobName); return localCache.get(jobName); } } TimerJobInstance found = null; for (Timer timer : timerService.getTimers()) { try { Serializable info = timer.getInfo(); if (info instanceof EjbTimerJob) { EjbTimerJob job = (EjbTimerJob) info; EjbGlobalJobHandle handle = (EjbGlobalJobHandle) job.getTimerJobInstance().getJobHandle(); if (handle.getUuid().equals(jobName)) { found = handle.getTimerJobInstance(); if (USE_LOCAL_CACHE) { localCache.putIfAbsent(jobName, found); } logger.debug("Job {} does match timer and is going to be returned {}", jobName, found); break; } } } catch (NoSuchObjectLocalException e) { logger.debug("Timer info for {} was not found ", timer); } } return found; } }
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.gradle.compiler;

import org.junit.Test;

import java.io.IOException;

/**
 * Verifies that Gradle resource-processing configuration (source sets, includes/excludes,
 * idea-plugin output customization) is honored when a Gradle project is imported and
 * compiled. Each scenario exists in two flavors: one module per source set, and a
 * single merged module per Gradle project ("_MergedProject" variants).
 *
 * @author Vladislav.Soroka
 * @since 7/21/2014
 */
@SuppressWarnings("JUnit4AnnotatedMethodInJUnit3TestCase")
public class GradleResourceProcessingTest extends GradleCompilingTestCase {

  // Plain 'java' plugin: main/test resources are copied to build/resources/{main,test}.
  @Test
  public void testBasicResourceCopying() throws Exception {
    createProjectSubFile("src/main/resources/dir/file.properties");
    createProjectSubFile("src/test/resources/dir/file-test.properties");
    importProject(
      "apply plugin: 'java'"
    );
    assertModules("project", "project_main", "project_test");
    compileModules("project_main", "project_test");
    assertCopied("build/resources/main/dir/file.properties");
    assertCopied("build/resources/test/dir/file-test.properties");
  }

  // Same as above, but importing with a single merged module per Gradle project.
  @Test
  public void testBasicResourceCopying_MergedProject() throws Exception {
    createProjectSubFile("src/main/resources/dir/file.properties");
    createProjectSubFile("src/test/resources/dir/file-test.properties");
    importProjectUsingSingeModulePerGradleProject(
      "apply plugin: 'java'"
    );
    assertModules("project");
    compileModules("project");
    assertCopied("build/resources/main/dir/file.properties");
    assertCopied("build/resources/test/dir/file-test.properties");
  }

  // src/main/java is added as an extra resource root; its files must be copied too.
  @Test
  public void testResourceCopyingFromSourcesFolder() throws Exception {
    createProjectSubFile("src/main/resources/dir/file.properties");
    createProjectSubFile("src/test/resources/dir/file-test.properties");
    createProjectSubFile("src/main/java/file.txt");
    importProject(
      "apply plugin: 'java'\n" +
      "sourceSets {\n" +
      "  main {\n" +
      "    resources.srcDir file('src/main/java')\n" +
      "  }\n" +
      "}"
    );
    assertModules("project", "project_main", "project_test");
    compileModules("project_main", "project_test");
    assertCopied("build/resources/main/dir/file.properties");
    assertCopied("build/resources/test/dir/file-test.properties");
    assertCopied("build/resources/main/file.txt");
  }

  @Test
  public void testResourceCopyingFromSourcesFolder_MergedProject() throws Exception {
    createProjectSubFile("src/main/resources/dir/file.properties");
    createProjectSubFile("src/test/resources/dir/file-test.properties");
    createProjectSubFile("src/main/java/file.txt");
    importProjectUsingSingeModulePerGradleProject(
      "apply plugin: 'java'\n" +
      "sourceSets {\n" +
      "  main {\n" +
      "    resources.srcDir file('src/main/java')\n" +
      "  }\n" +
      "}"
    );
    assertModules("project");
    compileModules("project");
    assertCopied("build/resources/main/dir/file.properties");
    assertCopied("build/resources/test/dir/file-test.properties");
    assertCopied("build/resources/main/file.txt");
  }

  // The idea plugin's custom outputDir/testOutputDir must redirect copied resources.
  @Test
  public void testResourceProcessingWithIdeaPluginCustomization() throws Exception {
    createProjectSubFile("src/main/resources/dir/file.properties");
    createProjectSubFile("src/test/resources/dir/file-test.properties");
    importProject(
      "apply plugin: 'java'\n" +
      "apply plugin: 'idea'\n" +
      "idea {\n" +
      "  module {\n" +
      "    inheritOutputDirs = false\n" +
      "    outputDir = file('muchBetterOutputDir')\n" +
      "    testOutputDir = file('muchBetterTestOutputDir')\n" +
      "  }\n" +
      "}"
    );
    assertModules("project", "project_main", "project_test");
    compileModules("project_main", "project_test");
    assertCopied("muchBetterOutputDir/dir/file.properties");
    assertCopied("muchBetterTestOutputDir/dir/file-test.properties");
  }

  @Test
  public void testResourceProcessingWithIdeaPluginCustomization_Merged() throws Exception {
    createProjectSubFile("src/main/resources/dir/file.properties");
    createProjectSubFile("src/test/resources/dir/file-test.properties");
    importProjectUsingSingeModulePerGradleProject(
      "apply plugin: 'java'\n" +
      "apply plugin: 'idea'\n" +
      "idea {\n" +
      "  module {\n" +
      "    inheritOutputDirs = false\n" +
      "    outputDir = file('muchBetterOutputDir')\n" +
      "    testOutputDir = file('muchBetterTestOutputDir')\n" +
      "  }\n" +
      "}"
    );
    assertModules("project");
    compileModules("project");
    assertCopied("muchBetterOutputDir/dir/file.properties");
    assertCopied("muchBetterTestOutputDir/dir/file-test.properties");
  }

  // include/exclude patterns configured per source set (see assertCopiedResources()
  // for the expected copied/skipped layout).
  @Test
  public void testIncludesAndExcludesInSourceSets() throws Exception {
    createFilesForIncludesAndExcludesTest();
    importProject(
      "apply plugin: 'java'\n" +
      "\n" +
      "sourceSets {\n" +
      "  main {\n" +
      "    resources {\n" +
      "      include '**/*.yyy'\n" +
      "      include '**/*.xxx'\n" +
      "      exclude 'dir/*.yyy'\n" +
      "      exclude '*.xxx'\n" +
      "    }\n" +
      "  }\n" +
      "  test {\n" +
      "    resources {\n" +
      "      include '**/*.yyy'\n" +
      "      include '**/*.xxx'\n" +
      "      exclude 'dir/*.yyy'\n" +
      "      exclude '*.xxx'\n" +
      "    }\n" +
      "  }\n" +
      "}"
    );
    assertModules("project", "project_main", "project_test");
    compileModules("project_main", "project_test");
    assertCopiedResources();
  }

  @Test
  public void testIncludesAndExcludesInSourceSets_MergedProject() throws Exception {
    createFilesForIncludesAndExcludesTest();
    importProjectUsingSingeModulePerGradleProject(
      "apply plugin: 'java'\n" +
      "\n" +
      "sourceSets {\n" +
      "  main {\n" +
      "    resources {\n" +
      "      include '**/*.yyy'\n" +
      "      include '**/*.xxx'\n" +
      "      exclude 'dir/*.yyy'\n" +
      "      exclude '*.xxx'\n" +
      "    }\n" +
      "  }\n" +
      "  test {\n" +
      "    resources {\n" +
      "      include '**/*.yyy'\n" +
      "      include '**/*.xxx'\n" +
      "      exclude 'dir/*.yyy'\n" +
      "      exclude '*.xxx'\n" +
      "    }\n" +
      "  }\n" +
      "}"
    );
    assertModules("project");
    compileModules("project");
    assertCopiedResources();
  }

  // Same patterns applied via sourceSets.all instead of per-set blocks.
  @Test
  public void testIncludesAndExcludesInAllSourceSets() throws Exception {
    createFilesForIncludesAndExcludesTest();
    importProject(
      "apply plugin: 'java'\n" +
      "\n" +
      "sourceSets.all {\n" +
      "  resources {\n" +
      "    include '**/*.yyy'\n" +
      "    include '**/*.xxx'\n" +
      "    exclude 'dir/*.yyy'\n" +
      "    exclude '*.xxx'\n" +
      "  }\n" +
      "}"
    );
    assertModules("project", "project_main", "project_test");
    compileModules("project_main", "project_test");
    assertCopiedResources();
  }

  @Test
  public void testIncludesAndExcludesInAllSourceSets_MergedProject() throws Exception {
    createFilesForIncludesAndExcludesTest();
    importProjectUsingSingeModulePerGradleProject(
      "apply plugin: 'java'\n" +
      "\n" +
      "sourceSets.all {\n" +
      "  resources {\n" +
      "    include '**/*.yyy'\n" +
      "    include '**/*.xxx'\n" +
      "    exclude 'dir/*.yyy'\n" +
      "    exclude '*.xxx'\n" +
      "  }\n" +
      "}"
    );
    assertModules("project");
    compileModules("project");
    assertCopiedResources();
  }

  // Same patterns configured on the processResources/processTestResources tasks directly.
  @Test
  public void testIncludesAndExcludesInResourcesTask() throws Exception {
    createFilesForIncludesAndExcludesTest();
    importProject(
      "apply plugin: 'java'\n" +
      "\n" +
      "processResources {\n" +
      "  include '**/*.yyy'\n" +
      "  include '**/*.xxx'\n" +
      "  exclude 'dir/*.yyy'\n" +
      "  exclude '*.xxx'\n" +
      "}\n" +
      "\n" +
      "processTestResources {\n" +
      "  include '**/*.yyy'\n" +
      "  include '**/*.xxx'\n" +
      "  exclude 'dir/*.yyy'\n" +
      "  exclude '*.xxx'\n" +
      "}\n"
    );
    assertModules("project", "project_main", "project_test");
    compileModules("project_main", "project_test");
    assertCopiedResources();
  }

  @Test
  public void testIncludesAndExcludesInResourcesTask_MergedProject() throws Exception {
    createFilesForIncludesAndExcludesTest();
    importProjectUsingSingeModulePerGradleProject(
      "apply plugin: 'java'\n" +
      "\n" +
      "processResources {\n" +
      "  include '**/*.yyy'\n" +
      "  include '**/*.xxx'\n" +
      "  exclude 'dir/*.yyy'\n" +
      "  exclude '*.xxx'\n" +
      "}\n" +
      "\n" +
      "processTestResources {\n" +
      "  include '**/*.yyy'\n" +
      "  include '**/*.xxx'\n" +
      "  exclude 'dir/*.yyy'\n" +
      "  exclude '*.xxx'\n" +
      "}\n"
    );
    assertModules("project");
    compileModules("project");
    assertCopiedResources();
  }

  // Regression scenario: a subproject literally named "test" must not confuse
  // resource copying for main vs. test source sets.
  @Test
  public void testModuleWithNameTestResourceCopying() throws Exception {
    createProjectSubFile("bar/foo/src/main/resources/dir/file.properties");
    createProjectSubFile("bar/foo/src/test/resources/dir/file-test.properties");
    createProjectSubFile("bar/test/src/main/resources/dir/file.properties");
    createProjectSubFile("bar/test/src/test/resources/dir/file-test.properties");
    createSettingsFile("include ':bar:foo'\n" +
                       "include ':bar:test'");
    importProjectUsingSingeModulePerGradleProject(
      "subprojects {\n" +
      "  apply plugin: 'java'\n" +
      "}\n"
    );
    assertModules("project", "bar", "foo", "test");
    compileModules("project", "bar", "foo", "test");
    assertCopied("bar/foo/build/resources/main/dir/file.properties");
    assertCopied("bar/foo/build/resources/test/dir/file-test.properties");
    assertCopied("bar/test/build/resources/main/dir/file.properties");
    assertCopied("bar/test/build/resources/test/dir/file-test.properties");
  }

  // Fixture for the include/exclude tests: .xxx/.yyy/.zzz files in root and dir/
  // for both main and test resource roots.
  private void createFilesForIncludesAndExcludesTest() throws IOException {
    createProjectSubFile("src/main/resources/dir/file.xxx");
    createProjectSubFile("src/main/resources/dir/file.yyy");
    createProjectSubFile("src/main/resources/file.xxx");
    createProjectSubFile("src/main/resources/file.yyy");
    createProjectSubFile("src/main/resources/file.zzz");

    createProjectSubFile("src/test/resources/dir/file.xxx");
    createProjectSubFile("src/test/resources/dir/file.yyy");
    createProjectSubFile("src/test/resources/file.xxx");
    createProjectSubFile("src/test/resources/file.yyy");
    createProjectSubFile("src/test/resources/file.zzz");
  }

  // Shared expectations for the include/exclude patterns above:
  // include **/*.yyy and **/*.xxx, then exclude dir/*.yyy and top-level *.xxx;
  // .zzz files are never included.
  private void assertCopiedResources() {
    // assert production resources
    assertCopied("build/resources/main/dir/file.xxx");
    assertCopied("build/resources/main/file.yyy");
    assertNotCopied("build/resources/main/dir/file.yyy");
    assertNotCopied("build/resources/main/file.xxx");
    assertNotCopied("build/resources/main/file.zzz");

    // assert test resources
    assertCopied("build/resources/test/dir/file.xxx");
    assertCopied("build/resources/test/file.yyy");
    assertNotCopied("build/resources/test/dir/file.yyy");
    assertNotCopied("build/resources/test/file.xxx");
    assertNotCopied("build/resources/test/file.zzz");
  }
}
/*
 * Copyright 2015 Brent Douglas and other contributors
 * as indicated by the @author tags. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.machinecode.chainlink.core.transport;

import gnu.trove.map.TLongObjectMap;
import gnu.trove.map.hash.TLongObjectHashMap;
import io.machinecode.chainlink.core.Constants;
import io.machinecode.chainlink.core.execution.CallbackEventImpl;
import io.machinecode.chainlink.core.execution.ExecutableEventImpl;
import io.machinecode.chainlink.core.registry.UUIDId;
import io.machinecode.chainlink.core.then.AllChain;
import io.machinecode.chainlink.core.then.ChainImpl;
import io.machinecode.chainlink.core.transport.cmd.CleanupCommand;
import io.machinecode.chainlink.core.transport.cmd.Command;
import io.machinecode.chainlink.core.transport.cmd.GetWorkerIdAndPushChainCommand;
import io.machinecode.chainlink.core.transport.cmd.GetWorkerIdsCommand;
import io.machinecode.chainlink.core.transport.cmd.PushChainCommand;
import io.machinecode.chainlink.spi.Messages;
import io.machinecode.chainlink.spi.configuration.Configuration;
import io.machinecode.chainlink.spi.configuration.Dependencies;
import io.machinecode.chainlink.spi.property.PropertyLookup;
import io.machinecode.chainlink.spi.context.ExecutionContext;
import io.machinecode.chainlink.spi.execution.Executable;
import io.machinecode.chainlink.spi.execution.Worker;
import io.machinecode.chainlink.spi.execution.WorkerId;
import io.machinecode.chainlink.spi.registry.ChainId;
import io.machinecode.chainlink.spi.registry.ExecutableId;
import io.machinecode.chainlink.spi.registry.JobEventListener;
import io.machinecode.chainlink.spi.registry.Registry;
import io.machinecode.chainlink.spi.registry.RepositoryId;
import io.machinecode.chainlink.spi.repository.Repository;
import io.machinecode.chainlink.spi.then.Chain;
import io.machinecode.chainlink.spi.transport.Transport;
import io.machinecode.then.api.Deferred;
import io.machinecode.then.api.OnComplete;
import io.machinecode.then.api.Promise;
import io.machinecode.then.api.Reject;
import io.machinecode.then.api.Resolve;
import io.machinecode.then.core.AllDeferred;
import io.machinecode.then.core.FutureDeferred;
import io.machinecode.then.core.RejectedDeferred;
import io.machinecode.then.core.ResolvedDeferred;
import org.jboss.logging.Logger;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.ListIterator;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/**
 * Base class for transports that distribute executables to workers on remote
 * nodes, falling back to local execution when no remote worker can be found.
 * Subclasses supply the actual wire mechanism via {@link #invokeRemote} and
 * {@link #invokeEverywhere} and identify nodes by an address of type {@code A}.
 *
 * @author <a href="mailto:brent.n.douglas@gmail.com">Brent Douglas</a>
 * @since 1.0
 */
public abstract class DistributedTransport<A> implements Transport {

    private static final Logger log = Logger.getLogger(DistributedTransport.class);

    protected final Registry registry;
    protected Configuration configuration;
    // Single-thread executor used to run job-cleanup futures after a job unregisters.
    protected final Executor reaper;

    // Remote addresses that have participated in a job execution, keyed by
    // jobExecutionId; populated on job register, drained on unregister so the
    // remote nodes can be told to clean up.
    // NOTE(review): TLongObjectHashMap is not documented as thread-safe and
    // this map is touched from listener callbacks and the reaper thread with
    // no visible synchronization — confirm external locking or single-threaded
    // registry callbacks.
    final TLongObjectMap<List<A>> remoteExecutions = new TLongObjectHashMap<>();

    // Network timeout applied to remote invocations (see getTimeout/getTimeUnit).
    protected final long timeout;
    protected final TimeUnit unit;

    /**
     * Reads the network timeout from properties and registers a job event
     * listener that remembers remote participants of each job and sends them a
     * {@link CleanupCommand} once the job unregisters.
     *
     * @param dependencies source of the {@link Registry}
     * @param properties   supplies {@code Constants.TIMEOUT} / {@code TIMEOUT_UNIT}
     * @throws Exception on configuration failure
     */
    public DistributedTransport(final Dependencies dependencies, final PropertyLookup properties) throws Exception {
        this.registry = dependencies.getRegistry();
        this.reaper = Executors.newSingleThreadExecutor();
        this.timeout = Long.parseLong(properties.getProperty(Constants.TIMEOUT, Constants.Defaults.NETWORK_TIMEOUT));
        this.unit = TimeUnit.valueOf(properties.getProperty(Constants.TIMEOUT_UNIT, Constants.Defaults.NETWORK_TIMEOUT_UNIT));
        this.registry.registerJobEventListener("cleanup-remote-jobs", new JobEventListener() {
            @Override
            public Promise<?,?,?> onRegister(final long jobExecutionId, final Chain<?> job) {
                // Start tracking remote participants for this job.
                remoteExecutions.put(jobExecutionId, new ArrayList<A>());
                return null;
            }

            @Override
            public Promise<?,?,?> onUnregister(final long jobExecutionId, final Chain<?> job) {
                final CleanupCommand command = new CleanupCommand(jobExecutionId);
                // Wait (up to the transport timeout) for the job chain to
                // settle, then notify every remote participant to clean up.
                final FutureDeferred<Object, Void> promise = new FutureDeferred<>(job, timeout, unit);
                promise.onComplete(new OnComplete() {
                    @Override
                    public void complete(final int status) {
                        // NOTE(review): remove() returns null when onRegister
                        // never ran for this id, which would NPE here —
                        // confirm register always precedes unregister.
                        for (final A address : remoteExecutions.remove(jobExecutionId)) {
                            if (!address.equals(getAddress())) {
                                try {
                                    invokeRemote(address, command);
                                } catch (Exception e) {
                                    log.errorf(e, ""); // TODO Message
                                }
                            }
                        }
                        log.debugf(Messages.get("CHAINLINK-005101.registry.removed.job"), jobExecutionId);
                    }
                });
                reaper.execute(promise);
                return promise;
            }
        });
    }

    /** Stores the configuration for later use by distribute/callback. */
    @Override
    public void open(final Configuration configuration) throws Exception {
        this.configuration = configuration;
    }

    /** Unregisters the cleanup listener installed by the constructor. */
    @Override
    public void close() throws Exception {
        this.registry.unregisterJobEventListener("cleanup-remote-jobs");
    }

    /**
     * Distributes the given executables over up to {@code maxThreads} remote
     * workers, wrapping round-robin over the workers when there are more
     * executables than workers. Falls back to local distribution if remote
     * worker lookup fails.
     *
     * @param maxThreads  maximum number of workers requested
     * @param executables executables to run; must be non-empty
     * @return a promise resolving to a chain aggregating all executions
     * @throws Exception on lookup failure
     */
    @Override
    public Promise<Chain<?>,Throwable,Object> distribute(final int maxThreads, final Executable... executables) throws Exception {
        if (executables.length == 0) {
            throw new IllegalArgumentException(); //TODO Message
        }
        final long jobExecutionId = executables[0].getContext().getJobExecutionId();
        return _getWorkers(jobExecutionId, maxThreads)
                .then(new Reject<List<RemoteExecution>, Throwable, Chain<?>, Throwable, Object>() {
                    @Override
                    public void resolve(final List<RemoteExecution> that, final Deferred<Chain<?>, Throwable, Object> next) {
                        // Round-robin the executables over the acquired workers;
                        // restart the iterator when the worker list is exhausted.
                        ListIterator<RemoteExecution> it = that.listIterator();
                        final Chain<?>[] chains = new Chain[executables.length];
                        int i = 0;
                        for (final Executable executable : executables) {
                            if (!it.hasNext()) {
                                it = that.listIterator();
                            }
                            final RemoteExecution remote = it.next();
                            final int index = i++;
                            chains[index] = remote.getChain();
                            registry.registerChain(jobExecutionId, remote.getLocalId(), remote.getChain());
                            final Worker worker = remote.getWorker();
                            worker.execute(new ExecutableEventImpl(executable, remote.getRemoteId()));
                        }
                        next.resolve(new AllChain<Executable>(chains));
                    }

                    @Override
                    public void reject(final Throwable that, final Deferred<Chain<?>, Throwable, Object> next) {
                        // Remote worker acquisition failed: try to run everything
                        // locally, keeping the original failure as suppressed.
                        try {
                            next.resolve(LocalTransport.localDistribute(configuration, maxThreads, executables));
                        } catch (final Exception e) {
                            e.addSuppressed(that);
                            next.reject(e);
                        }
                    }
                });
    }

    /**
     * Delivers a callback event to the worker owning {@code executableId},
     * locating it locally first and remotely otherwise; falls back to a local
     * callback if the remote lookup fails.
     *
     * @param executableId id of the executable to call back
     * @param context      execution context carrying the jobExecutionId
     * @return a promise resolving to the chain registered for the callback
     * @throws Exception on lookup failure
     */
    @Override
    public Promise<Chain<?>,Throwable,Object> callback(final ExecutableId executableId, final ExecutionContext context) throws Exception {
        final long jobExecutionId = context.getJobExecutionId();
        return _getWorker(jobExecutionId, executableId).then(new Reject<RemoteExecution, Throwable, Chain<?>, Throwable, Object>() {
            @Override
            public void resolve(final RemoteExecution that, final Deferred<Chain<?>,Throwable,Object> next) {
                final Worker worker = that.getWorker();
                registry.registerChain(jobExecutionId, that.getLocalId(), that.getChain());
                worker.callback(new CallbackEventImpl(jobExecutionId, executableId, that.getRemoteId(), context));
                next.resolve(that.getChain());
            }

            @Override
            public void reject(final Throwable that, final Deferred<Chain<?>, Throwable, Object> next) {
                try {
                    next.resolve(LocalTransport.localCallback(configuration, executableId, context));
                } catch (final Exception e) {
                    e.addSuppressed(that);
                    next.reject(e);
                }
            }
        });
    }

    /** Returns a proxy repository that forwards operations over this transport. */
    @Override
    public Repository getRepository(final RepositoryId id) throws Exception {
        return new DistributedProxyRepository(this, id);
    }

    /**
     * Resolves the worker owning the given executable. If the executable is
     * registered locally, the local worker is returned with a fresh local
     * chain; otherwise the owning node (taken from the executable id's
     * address) is asked for its worker id and a remote chain is pushed there.
     *
     * @param jobExecutionId the job the executable belongs to
     * @param executableId   the executable to locate
     * @return promise of the worker plus local/remote chain ids
     * @throws Exception on registry access failure
     */
    protected Promise<RemoteExecution,? extends Throwable,Object> _getWorker(final long jobExecutionId, final ExecutableId executableId) throws Exception {
        final Executable executable = configuration.getRegistry().getExecutable(jobExecutionId, executableId);
        if (executable != null) {
            // Local hit: the executable is registered on this node.
            final Worker worker = configuration.getExecutor().getWorker(executable.getWorkerId());
            if (worker == null) {
                return new RejectedDeferred<RemoteExecution,Throwable,Object>(new Exception("No worker found with jobExecutionId=" + jobExecutionId + " and executableId" + executableId));
            }
            final UUIDId id = new UUIDId(this);
            return new ResolvedDeferred<>(new RemoteExecution(worker, id, id, new ChainImpl<Void>()));
        }
        // Remote: ask the owning node for its worker and push a chain there.
        final ChainId localId = new UUIDId(this);
        return invokeRemote(executableId.getAddress(), new GetWorkerIdAndPushChainCommand(jobExecutionId, executableId, localId))
                .then(new Resolve<RemoteWorkerAndChain, RemoteExecution, Throwable, Object>() {
                    @Override
                    public void resolve(final RemoteWorkerAndChain remote, final Deferred<RemoteExecution, Throwable, Object> next) {
                        if (remote != null) {
                            next.resolve(
                                    new RemoteExecution(
                                            new DistributedWorker(DistributedTransport.this, remote.workerId),
                                            localId,
                                            remote.chainId,
                                            new DistributedLocalChain(DistributedTransport.this, remote.workerId.getAddress(), jobExecutionId, remote.chainId)
                                    )
                            );
                        } else {
                            next.reject(new Exception("No remote worker for executable id " + executableId + " found"));
                        }
                    }
                });
    }

    /**
     * Pushes a new chain for {@code jobExecutionId} to the node owning
     * {@code workerId} and wraps the returned remote chain id in a
     * {@link RemoteExecution}.
     */
    private Promise<RemoteExecution,Throwable,?> getRemoteChainAndIds(final WorkerId workerId, final long jobExecutionId) {
        final ChainId localId = new UUIDId(this);
        return invokeRemote(
                workerId.getAddress(),
                new PushChainCommand(jobExecutionId, localId),
                getTimeout(),
                getTimeUnit()
        ).then(new Resolve<ChainId, RemoteExecution, Throwable, Object>() {
            @Override
            public void resolve(final ChainId remoteId, final Deferred<RemoteExecution, Throwable, Object> next) {
                next.resolve(
                        new RemoteExecution(
                                new DistributedWorker(DistributedTransport.this, workerId),
                                localId,
                                remoteId,
                                new DistributedLocalChain(DistributedTransport.this, workerId.getAddress(), jobExecutionId, remoteId)
                        )
                );
            }
        });
    }

    /**
     * Collects {@code required} remote executions by querying all nodes for
     * worker ids and cycling over the answers until enough workers have been
     * gathered (workers are reused when fewer than {@code required} exist).
     * Rejects when no node reports any worker at all.
     *
     * @param jobExecutionId job the chains will belong to
     * @param required       number of executions to gather
     * @return promise of the gathered remote executions
     */
    protected Promise<List<RemoteExecution>,Throwable,Object> _getWorkers(final long jobExecutionId, final int required) {
        return invokeEverywhere(new GetWorkerIdsCommand(required)).then(new Resolve<Iterable<Iterable<WorkerId>>, List<RemoteExecution>, Throwable, Object>() {
            @Override
            public void resolve(final Iterable<Iterable<WorkerId>> that, final Deferred<List<RemoteExecution>, Throwable, Object> next) {
                int i = 0;
                final List<Promise<RemoteExecution, Throwable, ?>> promises = new ArrayList<>(required);
                // Keep sweeping the per-node worker lists until "required"
                // promises are queued; each full sweep reuses the same workers.
                loop: while (i < required) {
                    for (final Iterable<WorkerId> node : that) {
                        for (final WorkerId workerId : node) {
                            promises.add(getRemoteChainAndIds(workerId, jobExecutionId));
                            ++i;
                            if (i >= required) {
                                break loop;
                            }
                        }
                    }
                    if (i == 0) {
                        // A sweep over every node produced nothing: give up.
                        next.reject(new Exception("No remote workers found"));
                        return;
                    }
                }
                new AllDeferred<>(promises)
                        .onResolve(next)
                        .onReject(next)
                        .onCancel(next);
            }
        });
    }

    /** @return the address identifying this node to the transport layer. */
    public abstract A getAddress();

    /** Convenience overload applying this transport's configured timeout. */
    protected final <T> Promise<T, Throwable, Object> invokeRemote(final Object address, final Command<T> command) {
        return invokeRemote(address, command, timeout, unit);
    }

    /** Sends {@code command} to {@code address}, resolving with its reply. */
    protected abstract <T> Promise<T,Throwable,Object> invokeRemote(final Object address, final Command<T> command, final long timeout, final TimeUnit unit);

    /** Broadcasts {@code command} to all nodes, resolving with all replies. */
    protected abstract <T> Promise<? extends Iterable<T>,Throwable,Object> invokeEverywhere(final Command<T> command);

    @Override
    public long getTimeout() {
        return timeout;
    }

    @Override
    public TimeUnit getTimeUnit() {
        return unit;
    }

    /**
     * @param all all known member addresses
     * @return an unmodifiable copy of {@code all} with this node removed
     */
    protected List<A> _remoteMembers(final Collection<A> all) {
        final List<A> that = new ArrayList<>(all);
        that.remove(this.getAddress());
        return Collections.unmodifiableList(that);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* $Id$ */

package org.apache.fop.hyphenation;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.net.URISyntaxException;
import java.util.Map;

import org.xml.sax.InputSource;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.fop.ResourceEventProducer;
import org.apache.fop.apps.io.InternalResourceResolver;
import org.apache.fop.events.EventBroadcaster;

/**
 * <p>This class is the main entry point to the hyphenation package.
 * You can use only the static methods or create an instance.</p>
 *
 * <p>This work was authored by Carlos Villegas (cav@uniscope.co.jp).</p>
 */
public final class Hyphenator {

    /** logging instance */
    private static final Log log = LogFactory.getLog(Hyphenator.class);

    /** Lazily created, process-wide cache of parsed hyphenation trees. */
    private static HyphenationTreeCache hTreeCache;

    /** Enables a dump of statistics. Note: If activated content is sent to System.out! */
    private static boolean statisticsDump;

    /** Key suffix marking a user-configured serialized (.hyp) pattern file. */
    public static final String HYPTYPE = Hyphenator.class.toString() + "HYP";
    /** Key suffix marking a user-configured XML pattern file. */
    public static final String XMLTYPE = Hyphenator.class.toString() + "XML";

    /**
     * Creates a new hyphenator.
     */
    private Hyphenator() { }

    /** @return the default (static) hyphenation tree cache */
    public static synchronized HyphenationTreeCache getHyphenationTreeCache() {
        if (hTreeCache == null) {
            hTreeCache = new HyphenationTreeCache();
        }
        return hTreeCache;
    }

    /**
     * Clears the default hyphenation tree cache.<br>
     * This method can be used if the underlying data files are changed at runtime.
     */
    public static synchronized void clearHyphenationTreeCache() {
        hTreeCache = new HyphenationTreeCache();
    }

    /**
     * Returns a hyphenation tree for a given language and country,
     * with fallback from (lang,country) to (lang).
     * The hyphenation trees are cached.
     * @param lang the language
     * @param country the country (may be null or "none")
     * @param resolver resolver to find the hyphenation files
     * @param hyphPatNames the map with user-configured hyphenation pattern file names
     * @return the hyphenation tree
     */
    public static HyphenationTree getHyphenationTree(String lang, String country,
            InternalResourceResolver resolver, Map hyphPatNames) {
        return getHyphenationTree(lang, country, resolver, hyphPatNames, null);
    }

    /**
     * Returns a hyphenation tree for a given language and country, consulting
     * (in order) the cache, user-configured files, bundled resources, and
     * finally the country-less (lang) fallback. Misses are remembered so the
     * lookup (and its error report) happens only once per key.
     * @param lang the language
     * @param country the country (may be null or "none")
     * @param resourceResolver resolver to find the hyphenation files (may be null)
     * @param hyphPatNames the map with user-configured hyphenation pattern file names
     * @param eventBroadcaster broadcaster for the not-found event; if null the
     *        failure is only logged
     * @return the hyphenation tree, or null if none could be found
     */
    public static HyphenationTree getHyphenationTree(String lang, String country,
            InternalResourceResolver resourceResolver, Map hyphPatNames,
            EventBroadcaster eventBroadcaster) {
        String llccKey = HyphenationTreeCache.constructLlccKey(lang, country);
        HyphenationTreeCache cache = getHyphenationTreeCache();

        // If this hyphenation tree has been registered as missing, return immediately
        if (cache.isMissing(llccKey)) {
            return null;
        }

        // first try to find it in the cache
        HyphenationTree hTree = getHyphenationTreeCache().getHyphenationTree(lang, country);
        if (hTree != null) {
            return hTree;
        }

        String key = HyphenationTreeCache.constructUserKey(lang, country, hyphPatNames);
        if (key == null) {
            key = llccKey;
        }
        if (resourceResolver != null) {
            hTree = getUserHyphenationTree(key, resourceResolver);
        }
        if (hTree == null) {
            hTree = getFopHyphenationTree(key);
        }
        if (hTree == null && country != null && !country.equals("none")) {
            // retry without the country code
            return getHyphenationTree(lang, null, resourceResolver, hyphPatNames, eventBroadcaster);
        }

        // put it into the pattern cache
        if (hTree != null) {
            cache.cache(llccKey, hTree);
        } else {
            if (eventBroadcaster == null) {
                log.error("Couldn't find hyphenation pattern " + llccKey);
            } else {
                ResourceEventProducer producer = ResourceEventProducer.Provider.get(eventBroadcaster);
                String name = key.replace(HYPTYPE, "").replace(XMLTYPE, "");
                producer.hyphenationNotFound(cache, name);
            }
            cache.noteMissing(llccKey);
        }
        return hTree;
    }

    /**
     * Opens the bundled precompiled pattern resource "hyph/&lt;key&gt;.hyp",
     * trying the context class loader first and this class's loader second.
     * The caller owns (and must close) the returned stream.
     * @param key the language/country key
     * @return the resource stream, or null if the resource does not exist
     */
    private static InputStream getResourceStream(String key) {
        InputStream is = null;
        try {
            // Direct call replaces the original reflection-based lookup of
            // Thread#getContextClassLoader (a pre-Java-1.2 compatibility hack);
            // a SecurityException is treated like the reflective failure was:
            // fall through to the class-relative lookup below.
            ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
            if (contextClassLoader != null) {
                is = contextClassLoader.getResourceAsStream("hyph/" + key + ".hyp");
            }
        } catch (SecurityException se) {
            // ignore, fallback further down
        }
        if (is == null) {
            is = Hyphenator.class.getResourceAsStream("/hyph/" + key + ".hyp");
        }
        return is;
    }

    /**
     * Deserializes a {@link HyphenationTree} from the given stream. The stream
     * is NOT closed here; the caller owns it. Errors are logged and null is
     * returned.
     * @param in stream positioned at a serialized HyphenationTree
     * @return the tree, or null on I/O or class-resolution failure
     */
    private static HyphenationTree readHyphenationTree(InputStream in) {
        HyphenationTree hTree = null;
        try {
            ObjectInputStream ois = new ObjectInputStream(in);
            hTree = (HyphenationTree)ois.readObject();
        } catch (IOException ioe) {
            log.error("I/O error while loading precompiled hyphenation pattern file", ioe);
        } catch (ClassNotFoundException cnfe) {
            log.error("Error while reading hyphenation object from file", cnfe);
        }
        return hTree;
    }

    /**
     * Returns a hyphenation tree. This method looks in the resources (getResourceStream) for
     * the hyphenation patterns.
     * @param key the language/country key
     * @return the hyphenation tree or null if it wasn't found in the resources
     */
    public static HyphenationTree getFopHyphenationTree(String key) {
        InputStream is = null;
        try {
            is = getResourceStream(key);
            if (is == null) {
                if (log.isDebugEnabled()) {
                    log.debug("Couldn't find precompiled hyphenation pattern "
                            + key + " in resources");
                }
                return null;
            }
            return readHyphenationTree(is);
        } finally {
            // BUGFIX: the original declared an ObjectInputStream "ois" that was
            // never assigned and closed THAT in the finally block, leaking the
            // actually-opened resource stream "is". Close "is" instead.
            IOUtils.closeQuietly(is);
        }
    }

    /**
     * Load tree from serialized file or xml file
     * using configuration settings
     * @param key language key for the requested hyphenation file
     * @param resourceResolver resource resolver to find the hyphenation files
     * @return the requested HypenationTree or null if it is not available
     */
    public static HyphenationTree getUserHyphenationTree(String key,
            InternalResourceResolver resourceResolver) {
        HyphenationTree hTree = null;
        // I use here the following convention. The file name specified in
        // the configuration is taken as the base name. First we try
        // name + ".hyp" assuming a serialized HyphenationTree. If that fails
        // we try name + ".xml", assuming a raw hyphenation pattern file.

        // first try serialized object
        String name = key + ".hyp";
        if (key.endsWith(HYPTYPE)) {
            name = key.replace(HYPTYPE, "");
        }
        if (!key.endsWith(XMLTYPE)) {
            try {
                InputStream in = getHyphenationTreeStream(name, resourceResolver);
                try {
                    hTree = readHyphenationTree(in);
                } finally {
                    IOUtils.closeQuietly(in);
                }
                return hTree;
            } catch (IOException ioe) {
                if (log.isDebugEnabled()) {
                    log.debug("I/O problem while trying to load " + name, ioe);
                }
            }
        }

        // try the raw XML file
        name = key + ".xml";
        if (key.endsWith(XMLTYPE)) {
            name = key.replace(XMLTYPE, "");
        }
        hTree = new HyphenationTree();
        try {
            InputStream in = getHyphenationTreeStream(name, resourceResolver);
            try {
                InputSource src = new InputSource(in);
                src.setSystemId(name);
                hTree.loadPatterns(src);
            } finally {
                IOUtils.closeQuietly(in);
            }
            if (statisticsDump) {
                System.out.println("Stats: ");
                hTree.printStats();
            }
            return hTree;
        } catch (HyphenationException ex) {
            log.error("Can't load user patterns from XML file " + name
                    + ": " + ex.getMessage());
            return null;
        } catch (IOException ioe) {
            if (log.isDebugEnabled()) {
                log.debug("I/O problem while trying to load " + name, ioe);
            }
            return null;
        }
    }

    /**
     * Resolves {@code name} via the resource resolver and wraps the result in
     * a buffered stream. URI syntax problems are logged at debug level and
     * reported as a null return.
     * @param name the (possibly relative) hyphenation file name
     * @param resourceResolver resolver used to open the resource
     * @return buffered stream over the resource, or null on a bad URI
     * @throws IOException if the resource cannot be opened
     */
    private static InputStream getHyphenationTreeStream(String name,
            InternalResourceResolver resourceResolver) throws IOException {
        try {
            return new BufferedInputStream(resourceResolver.getResource(name));
        } catch (URISyntaxException use) {
            log.debug("An exception was thrown while attempting to load " + name, use);
        }
        return null;
    }

    /**
     * Hyphenates a word.
     * @param lang the language
     * @param country the optional country code (may be null or "none")
     * @param resourceResolver resolver to find the hyphenation files
     * @param hyphPatNames the map with user-configured hyphenation pattern file names
     * @param word the word to hyphenate
     * @param leftMin the minimum number of characters before the hyphenation point
     * @param rightMin the minimum number of characters after the hyphenation point
     * @return the hyphenation result
     */
    public static Hyphenation hyphenate(String lang, String country,
            InternalResourceResolver resourceResolver, Map hyphPatNames,
            String word, int leftMin, int rightMin) {
        return hyphenate(lang, country, resourceResolver, hyphPatNames, word, leftMin, rightMin, null);
    }

    /**
     * Hyphenates a word, broadcasting a not-found event when no pattern tree
     * exists for the requested language/country.
     * @param lang the language
     * @param country the optional country code (may be null or "none")
     * @param resourceResolver resolver to find the hyphenation files
     * @param hyphPatNames the map with user-configured hyphenation pattern file names
     * @param word the word to hyphenate
     * @param leftMin the minimum number of characters before the hyphenation point
     * @param rightMin the minimum number of characters after the hyphenation point
     * @param eventBroadcaster broadcaster used to report missing patterns (may be null)
     * @return the hyphenation result, or null if no pattern tree is available
     */
    public static Hyphenation hyphenate(String lang, String country,
            InternalResourceResolver resourceResolver, Map hyphPatNames,
            String word, int leftMin, int rightMin, EventBroadcaster eventBroadcaster) {
        HyphenationTree hTree = getHyphenationTree(lang, country, resourceResolver, hyphPatNames,
                eventBroadcaster);
        if (hTree == null) {
            return null;
        }
        return hTree.hyphenate(word, leftMin, rightMin);
    }
}