gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package controlP5;
/**
* controlP5 is a processing gui library.
*
* 2006-2012 by Andreas Schlegel
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1
* of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307 USA
*
* @author Andreas Schlegel (http://www.sojamo.de)
* @modified 12/23/2012
* @version 2.0.4
*
*/
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import processing.core.PApplet;
import processing.core.PConstants;
import processing.core.PVector;
import processing.event.KeyEvent;
import processing.event.MouseEvent;
import controlP5.ControlP5Base.KeyCode;
/**
* the purpose of a control window is to shift controllers from the main window into a separate window. to save cpu, a control window is not
* updated when not active - in focus. for the same reason the framerate is set to 15. To constantly update the control window, use
* {@link ControlWindow#setUpdateMode(int)}
*
* @example controllers/ControlP5window
*/
public final class ControlWindow {

    protected ControlP5 cp5;
    // the controller currently flagged as active (see isControllerActive/setControllerActive)
    protected Controller<?> isControllerActive;
    // window background color, fully transparent by default
    public int background = 0x00000000;
    protected CColor color = new CColor();
    private String _myName = "main";
    protected PApplet _myApplet;
    // tab list; index 0 is the "global" tab, index 1 the "default" tab
    protected ControllerList _myTabs;
    protected boolean isVisible = true;
    protected boolean isInit = false;
    protected boolean isRemove = false;
    protected CDrawable _myDrawable;
    protected boolean isAutoDraw;
    protected boolean isUpdate;
    protected List<Canvas> _myCanvas;
    protected boolean isDrawBackground = true;
    protected boolean isUndecorated = false;
    protected PVector autoPosition = new PVector(10, 30, 0);
    protected float tempAutoPositionHeight = 0;
    protected boolean rendererNotification = false;
    // screen position of the tab bar, (0,0) by default
    protected PVector positionOfTabs = new PVector(0, 0, 0);
    // frameCount of the last draw(); used to detect skipped frames in pre()/isMouseOver()
    private int _myFrameCount = 0;
    // true while mouse input drives the window; false while the Pointer is enabled
    private boolean isMouse = true;
    private Pointer _myPointer;
    private int mouseWheelMoved = 0;
    // controllers currently under the mouse, most recently added last
    private List<ControllerInterface<?>> mouseoverlist;
    private boolean isMouseOver;
    protected int mouseX;
    protected int mouseY;
    protected int pmouseX;
    protected int pmouseY;
    protected boolean mousePressed;
    protected boolean mouselock;
    protected char key;
    protected int keyCode;
    // key-down state indexed directly by keyCode.
    // NOTE(review): a keyCode >= 525 would throw ArrayIndexOutOfBoundsException — confirm
    // Processing key codes stay below this bound.
    private boolean[] keys = new boolean[525];
    // NOTE(review): incremented/decremented in keyEvent but never read — looks vestigial.
    private int numOfActiveKeys = 0;
    // mirrors papplet().focused; a change clears key/mouse state in pre()
    private boolean focused = true;

    /**
     * @exclude
     */
    public ControlWindow(final ControlP5 theControlP5, final PApplet theApplet) {
        mouseoverlist = new ArrayList<ControllerInterface<?>>();
        cp5 = theControlP5;
        _myApplet = theApplet;
        isAutoDraw = true;
        init();
    }

    // creates the pointer, canvas list and the two built-in tabs, and registers
    // the processing callbacks (pre/draw and, outside Android, key/mouse events).
    protected void init() {
        _myPointer = new Pointer();
        _myCanvas = new ArrayList<Canvas>();
        _myTabs = new ControllerList();
        _myTabs.add(new Tab(cp5, this, "global"));
        _myTabs.add(new Tab(cp5, this, "default"));
        activateTab((Tab) _myTabs.get(1));
        /*
         * register a post event that will be called by processing after the draw method has been finished.
         */
        // processing pre 2.0 will not draw automatically if in P3D mode. in earlier versions of controlP5
        // this had been checked here and the user had been informed to draw controlP5 manually by adding
        // cp5.draw() to the sketch's draw function. with processing 2.0 and this version of controlP5
        // this notification does no longer exist.
        if (isInit == false) {
            _myApplet.registerMethod("pre", this);
            _myApplet.registerMethod("draw", this);
            if (!cp5.isAndroid) {
                _myApplet.registerMethod("keyEvent", this);
                _myApplet.registerMethod("mouseEvent", this);
            }
        }
        isInit = true;
    }

    // returns the first active tab, or null; index 0 (the "global" tab) is skipped
    public Tab getCurrentTab() {
        for (int i = 1; i < _myTabs.size(); i++) {
            if (((Tab) _myTabs.get(i)).isActive()) {
                return (Tab) _myTabs.get(i);
            }
        }
        return null;
    }

    // activates the tab with the given name; clears mouse-over state when the
    // activation actually changes the tab.
    public ControlWindow activateTab(String theTab) {
        for (int i = 1; i < _myTabs.size(); i++) {
            if (((Tab) _myTabs.get(i)).getName().equals(theTab)) {
                if (!((Tab) _myTabs.get(i)).isActive) {
                    resetMouseOver();
                }
                activateTab((Tab) _myTabs.get(i));
            }
        }
        return this;
    }

    // removes a tab from this window
    public ControlWindow removeTab(Tab theTab) {
        _myTabs.remove(theTab);
        return this;
    }

    // appends an existing Tab instance to this window
    public Tab add(Tab theTab) {
        _myTabs.add(theTab);
        return theTab;
    }

    // returns the tab with the given name, creating it if necessary (delegated to cp5)
    public Tab addTab(String theTab) {
        return getTab(theTab);
    }

    // activates exactly one tab and deactivates all others (the "global" tab at
    // index 0 is untouched).
    protected ControlWindow activateTab(Tab theTab) {
        for (int i = 1; i < _myTabs.size(); i++) {
            if (_myTabs.get(i) == theTab) {
                if (!((Tab) _myTabs.get(i)).isActive) {
                    resetMouseOver();
                }
                ((Tab) _myTabs.get(i)).setActive(true);
            } else {
                ((Tab) _myTabs.get(i)).setActive(false);
            }
        }
        return this;
    }

    // returns the internal tab list (including the "global" tab at index 0)
    public ControllerList getTabs() {
        return _myTabs;
    }

    public Tab getTab(String theTabName) {
        return cp5.getTab(this, theTabName);
    }

    /**
     * Sets the position of the tab bar which is set to 0,0 by default. to move the tabs to y-position 100, use
     * cp5.window().setPositionOfTabs(new PVector(0,100,0));
     *
     * @param thePVector
     */
    public ControlWindow setPositionOfTabs(PVector thePVector) {
        positionOfTabs.set(thePVector);
        return this;
    }

    public ControlWindow setPositionOfTabs(int theX, int theY) {
        positionOfTabs.set(theX, theY, positionOfTabs.z);
        return this;
    }

    /**
     * Returns the position of the tab bar as PVector. to move the tabs to y-position 100, use cp5.window().getPositionOfTabs().y = 100; or
     * cp5.window().setPositionOfTabs(new PVector(0,100,0));
     *
     * @return PVector
     */
    public PVector getPositionOfTabs() {
        return positionOfTabs;
    }

    void setAllignmentOfTabs(int theValue, int theWidth) {
        // TODO
    }

    void setAllignmentOfTabs(int theValue, int theWidth, int theHeight) {
        // TODO
    }

    void setAllignmentOfTabs(int theValue) {
        // TODO
    }

    // removes all tabs (iterating backwards because Tab.remove mutates the list)
    // and clears the tab list.
    public void remove() {
        for (int i = _myTabs.size() - 1; i >= 0; i--) {
            ((Tab) _myTabs.get(i)).remove();
        }
        _myTabs.clear();
        _myTabs.clearDrawable();
    }

    /**
     * clear the control window, delete all controllers from a control window.
     */
    public ControlWindow clear() {
        remove();
        return this;
    }

    // propagates a font change to every tab
    protected void updateFont(ControlFont theControlFont) {
        for (int i = 0; i < _myTabs.size(); i++) {
            ((Tab) _myTabs.get(i)).updateFont(theControlFont);
        }
    }

    /**
     * Dispatches mouse-over and mouse-wheel state, then forwards updateEvents to the
     * global tab and to every active, visible tab.
     *
     * @exclude
     */
    @ControlP5.Invisible public void updateEvents() {
        handleMouseOver();
        handleMouseWheelMoved();
        if (_myTabs.size() <= 0) {
            return;
        }
        ((ControllerInterface<?>) _myTabs.get(0)).updateEvents();
        for (int i = 1; i < _myTabs.size(); i++) {
            ((Tab) _myTabs.get(i)).continuousUpdateEvents();
            if (((Tab) _myTabs.get(i)).isActive() && ((Tab) _myTabs.get(i)).isVisible()) {
                ((ControllerInterface<?>) _myTabs.get(i)).updateEvents();
            }
        }
    }

    /**
     * returns true if the mouse is inside a controller. !!! doesnt work for groups yet.
     */
    public boolean isMouseOver() {
        // TODO doesnt work for all groups yet, only ListBox and DropdownList.
        // if a frame has been skipped, the cached mouse-over state is stale — reset it.
        if (_myFrameCount + 1 < _myApplet.frameCount) {
            resetMouseOver();
        }
        return isVisible ? isMouseOver : false;
    }

    public boolean isMouseOver(ControllerInterface<?> theController) {
        return mouseoverlist.contains(theController);
    }

    // clears the mouse-over flag on every registered controller and empties the list
    public void resetMouseOver() {
        isMouseOver = false;
        for (int i = mouseoverlist.size() - 1; i >= 0; i--) {
            mouseoverlist.get(i).setMouseOver(false);
        }
        mouseoverlist.clear();
    }

    public ControllerInterface<?> getFirstFromMouseOverList() {
        if (getMouseOverList().isEmpty()) {
            return null;
        } else {
            return getMouseOverList().get(0);
        }
    }

    /**
     * A list of controllers that are registered with a mouseover.
     */
    public List<ControllerInterface<?>> getMouseOverList() {
        return mouseoverlist;
    }

    // drops controllers that are no longer hovered (or when the window is hidden)
    // and refreshes the aggregate isMouseOver flag.
    private ControlWindow handleMouseOver() {
        for (int i = mouseoverlist.size() - 1; i >= 0; i--) {
            if (!mouseoverlist.get(i).isMouseOver() || !isVisible) {
                mouseoverlist.remove(i);
            }
        }
        isMouseOver = mouseoverlist.size() > 0;
        return this;
    }

    public ControlWindow removeMouseOverFor(ControllerInterface<?> theController) {
        mouseoverlist.remove(theController);
        return this;
    }

    // registers a controller as hovered; duplicates and hidden controllers are ignored
    protected ControlWindow setMouseOverController(ControllerInterface<?> theController) {
        if (!mouseoverlist.contains(theController) && isVisible && theController.isVisible()) {
            mouseoverlist.add(theController);
        }
        isMouseOver = true;
        return this;
    }

    /**
     * updates all controllers inside the control window if update is enabled.
     *
     * @exclude
     */
    public void update() {
        ((ControllerInterface<?>) _myTabs.get(0)).update();
        for (int i = 1; i < _myTabs.size(); i++) {
            ((Tab) _myTabs.get(i)).update();
        }
    }

    /**
     * enable or disable the update function of a control window.
     */
    public void setUpdate(boolean theFlag) {
        isUpdate = theFlag;
        for (int i = 0; i < _myTabs.size(); i++) {
            ((ControllerInterface<?>) _myTabs.get(i)).setUpdate(theFlag);
        }
    }

    /**
     * check the update status of a control window.
     */
    public boolean isUpdate() {
        return isUpdate;
    }

    // adds a canvas, binds it to this window and lets it set itself up with the applet
    public ControlWindow addCanvas(Canvas theCanvas) {
        _myCanvas.add(theCanvas);
        theCanvas.setControlWindow(this);
        theCanvas.setup(_myApplet);
        return this;
    }

    public ControlWindow removeCanvas(Canvas theCanvas) {
        _myCanvas.remove(theCanvas);
        return this;
    }

    // set when draw() stops being called; triggers a one-shot resetMouseOver in pre()
    private boolean isReset = false;

    /**
     * Processing "pre" callback. Resets stale mouse-over state after skipped frames
     * and clears key/mouse state whenever the applet's focus changes.
     */
    public ControlWindow pre() {
        if (_myFrameCount + 1 < _myApplet.frameCount) {
            if (isReset) {
                resetMouseOver();
                isReset = false;
            }
        } else {
            isReset = true;
        }
        if (papplet().focused != focused) {
            clearKeys();
            mousePressed = false;
            focused = papplet().focused;
        }
        return this;
    }

    boolean pmouseReleased; // Android
    boolean pmousePressed; // Android

    /**
     * when in Android mode, call mouseEvent(int, int, boolean).
     *
     * @param theX
     * @param theY
     * @param pressed
     */
    public void mouseEvent(int theX, int theY, boolean pressed) {
        mouseX = theX;
        mouseY = theY;
        // pmousePressed/pmouseReleased turn the continuous pressed flag into
        // one-shot press/release edges.
        if (pressed && !pmousePressed) {
            updateEvents();
            mousePressedEvent();
            pmousePressed = true;
            pmouseReleased = false;
        } else if (!pressed && !pmouseReleased) {
            updateEvents();
            mouseReleasedEvent();
            // NOTE(review): raw ControllerInterface/Controller types here — the
            // desktop path (mouseEvent(MouseEvent)) does not perform this per-controller
            // onLeave/onRelease fan-out.
            for (ControllerInterface c : mouseoverlist) {
                if (c instanceof Controller) {
                    ((Controller) c).onLeave();
                    ((Controller) c).onRelease();
                } else if (c instanceof ControllerGroup) {
                    ((ControllerGroup) c).mouseReleased();
                }
            }
            resetMouseOver();
            pmousePressed = false;
            pmouseReleased = true;
        }
    }

    /**
     * Desktop mouse handler registered with processing; ignored while the Pointer
     * is enabled (isMouse == false).
     *
     * @exclude
     * @param theMouseEvent MouseEvent
     */
    public void mouseEvent(MouseEvent theMouseEvent) {
        if (isMouse) {
            mouseX = theMouseEvent.getX();
            mouseY = theMouseEvent.getY();
            if (theMouseEvent.getAction() == MouseEvent.PRESS) {
                mousePressedEvent();
            }
            if (theMouseEvent.getAction() == MouseEvent.RELEASE) {
                mouseReleasedEvent();
            }
        }
    }

    /**
     * Key handler registered with processing: tracks per-key down state, updates
     * cp5 modifiers, fires any registered keyboard shortcuts on PRESS, and finally
     * forwards the event to all tabs via handleKeyEvent.
     */
    public void keyEvent(KeyEvent theKeyEvent) {
        if (theKeyEvent.getAction() == KeyEvent.PRESS) {
            // allow special keys such as backspace, arrow left,
            // arrow right to pass test when active
            if (keys[theKeyEvent.getKeyCode()] && theKeyEvent.getKeyCode() != 8 && theKeyEvent.getKeyCode() != 37 && theKeyEvent.getKeyCode() != 39) {
                return;
            }
            keys[theKeyEvent.getKeyCode()] = true;
            numOfActiveKeys++;
            cp5.modifiers = theKeyEvent.getModifiers();
            key = theKeyEvent.getKey();
            keyCode = theKeyEvent.getKeyCode();
        }
        if (theKeyEvent.getAction() == KeyEvent.RELEASE) {
            keys[theKeyEvent.getKeyCode()] = false;
            numOfActiveKeys--;
            cp5.modifiers = theKeyEvent.getModifiers();
        }
        if (theKeyEvent.getAction() == KeyEvent.PRESS && cp5.isShortcuts()) {
            // collect all currently held keys into a KeyCode and fire matching shortcuts
            int n = 0;
            for (boolean b : keys) {
                n += b ? 1 : 0;
            }
            char[] c = new char[n];
            n = 0;
            for (int i = 0; i < keys.length; i++) {
                if (keys[i]) {
                    c[n++] = ((char) i);
                }
            }
            KeyCode code = new KeyCode(c);
            if (cp5.keymap.containsKey(code)) {
                for (ControlKey ck : cp5.keymap.get(code)) {
                    ck.keyEvent();
                }
            }
        }
        handleKeyEvent(theKeyEvent);
    }

    // forgets all held keys (used on focus change)
    public void clearKeys() {
        keys = new boolean[525];
        numOfActiveKeys = 0;
    }

    /**
     * Processing "draw" callback: updates events, then renders PRE canvases, tabs
     * (laying their labels out left-to-right from positionOfTabs), the global tab,
     * POST canvases and the tooltip. rect/ellipse/image modes and the style stack
     * are saved and restored around the controlP5 drawing.
     *
     * @exclude draw content.
     */
    public void draw() {
        _myFrameCount = _myApplet.frameCount;
        if (cp5.blockDraw == false) {
            if (cp5.isAndroid) {
                mouseEvent(cp5.papplet.mouseX, cp5.papplet.mouseY, cp5.papplet.mousePressed);
            } else {
                updateEvents();
            }
            if (isVisible) {
                // TODO save stroke, noStroke, fill, noFill, strokeWeight
                // parameters and restore after drawing controlP5 elements.
                int myRectMode = _myApplet.g.rectMode;
                int myEllipseMode = _myApplet.g.ellipseMode;
                int myImageMode = _myApplet.g.imageMode;
                _myApplet.pushStyle();
                _myApplet.rectMode(PConstants.CORNER);
                _myApplet.ellipseMode(PConstants.CORNER);
                _myApplet.imageMode(PConstants.CORNER);
                _myApplet.noStroke();
                if (_myDrawable != null) {
                    _myDrawable.draw(_myApplet);
                }
                for (int i = 0; i < _myCanvas.size(); i++) {
                    if ((_myCanvas.get(i)).mode() == Canvas.PRE) {
                        (_myCanvas.get(i)).draw(_myApplet);
                    }
                }
                _myApplet.noStroke();
                _myApplet.noFill();
                int myOffsetX = (int) getPositionOfTabs().x;
                int myOffsetY = (int) getPositionOfTabs().y;
                int myHeight = 0;
                if (_myTabs.size() > 0) {
                    for (int i = 1; i < _myTabs.size(); i++) {
                        if (((Tab) _myTabs.get(i)).isVisible()) {
                            if (myHeight < ((Tab) _myTabs.get(i)).height()) {
                                myHeight = ((Tab) _myTabs.get(i)).height();
                            }
                            // conflicts with Android, getWidth not found TODO
                            // if (myOffsetX > (papplet().getWidth()) - ((Tab) _myTabs.get(i)).width()) {
                            // myOffsetY += myHeight + 1;
                            // myOffsetX = (int) getPositionOfTabs().x;
                            // myHeight = 0;
                            // }
                            ((Tab) _myTabs.get(i)).setOffset(myOffsetX, myOffsetY);
                            if (((Tab) _myTabs.get(i)).isActive()) {
                                ((Tab) _myTabs.get(i)).draw(_myApplet);
                            }
                            if (((Tab) _myTabs.get(i)).updateLabel()) {
                                ((Tab) _myTabs.get(i)).drawLabel(_myApplet);
                            }
                            myOffsetX += ((Tab) _myTabs.get(i)).width();
                        }
                    }
                    ((ControllerInterface<?>) _myTabs.get(0)).draw(_myApplet);
                }
                for (int i = 0; i < _myCanvas.size(); i++) {
                    if ((_myCanvas.get(i)).mode() == Canvas.POST) {
                        (_myCanvas.get(i)).draw(_myApplet);
                    }
                }
                pmouseX = mouseX;
                pmouseY = mouseY;
                // draw Tooltip here.
                cp5.getTooltip().draw(this);
                _myApplet.rectMode(myRectMode);
                _myApplet.ellipseMode(myEllipseMode);
                _myApplet.imageMode(myImageMode);
                _myApplet.popStyle();
            }
        }
    }

    /**
     * Adds a custom context to a ControlWindow. Use a custom class which implements the CDrawable interface
     *
     * @see controlP5.CDrawable
     * @param theDrawable CDrawable
     */
    public ControlWindow setContext(CDrawable theDrawable) {
        _myDrawable = theDrawable;
        return this;
    }

    /**
     * returns the name of the control window.
     */
    public String name() {
        return _myName;
    }

    // forwards a press to the tabs; the first tab that consumes it sets mouselock
    // and stops propagation.
    private void mousePressedEvent() {
        if (isVisible) {
            mousePressed = true;
            for (int i = 0; i < _myTabs.size(); i++) {
                if (((ControllerInterface<?>) _myTabs.get(i)).setMousePressed(true)) {
                    mouselock = true;
                    return;
                }
            }
        }
    }

    // forwards a release to every tab and clears the press/lock flags
    private void mouseReleasedEvent() {
        if (isVisible) {
            mousePressed = false;
            mouselock = false;
            for (int i = 0; i < _myTabs.size(); i++) {
                ((ControllerInterface<?>) _myTabs.get(i)).setMousePressed(false);
            }
        }
    }

    // stores the wheel rotation; consumed (and zeroed) by handleMouseWheelMoved
    void setMouseWheelRotation(int theRotation) {
        if (isMouseOver()) {
            mouseWheelMoved = theRotation;
        }
    }

    // delivers the pending wheel rotation to the first visible hovered controller
    // (note the break: only one controller receives it), then clears the rotation.
    @SuppressWarnings("unchecked") private void handleMouseWheelMoved() {
        if (mouseWheelMoved != 0) {
            // snapshot to avoid concurrent modification while controllers react
            CopyOnWriteArrayList<ControllerInterface<?>> mouselist = new CopyOnWriteArrayList<ControllerInterface<?>>(mouseoverlist);
            for (ControllerInterface<?> c : mouselist) {
                if (c.isVisible()) {
                    if (c instanceof Controller) {
                        ((Controller) c).onScroll(mouseWheelMoved);
                    }
                    if (c instanceof ControllerGroup) {
                        ((ControllerGroup) c).onScroll(mouseWheelMoved);
                    }
                    if (c instanceof Slider) {
                        ((Slider) c).scrolled(mouseWheelMoved);
                    } else if (c instanceof Knob) {
                        ((Knob) c).scrolled(mouseWheelMoved);
                    } else if (c instanceof Numberbox) {
                        ((Numberbox) c).scrolled(mouseWheelMoved);
                    } else if (c instanceof ListBox) {
                        ((ListBox) c).scrolled(mouseWheelMoved);
                    } else if (c instanceof DropdownList) {
                        ((DropdownList) c).scrolled(mouseWheelMoved);
                    } else if (c instanceof Textarea) {
                        ((Textarea) c).scrolled(mouseWheelMoved);
                    }
                    break;
                }
            }
        }
        mouseWheelMoved = 0;
    }

    public boolean isMousePressed() {
        return mousePressed;
    }

    /**
     * forwards a key event to every tab.
     *
     * @exclude
     * @param theKeyEvent KeyEvent
     */
    public void handleKeyEvent(KeyEvent theKeyEvent) {
        for (int i = 0; i < _myTabs.size(); i++) {
            ((ControllerInterface<?>) _myTabs.get(i)).keyEvent(theKeyEvent);
        }
    }

    /**
     * set the color for the controller while active.
     */
    public ControlWindow setColorActive(int theColor) {
        color.setActive(theColor);
        for (int i = 0; i < getTabs().size(); i++) {
            ((Tab) getTabs().get(i)).setColorActive(theColor);
        }
        return this;
    }

    /**
     * set the foreground color of the controller.
     */
    public ControlWindow setColorForeground(int theColor) {
        color.setForeground(theColor);
        for (int i = 0; i < getTabs().size(); i++) {
            ((Tab) getTabs().get(i)).setColorForeground(theColor);
        }
        return this;
    }

    /**
     * set the background color of the controller.
     */
    public ControlWindow setColorBackground(int theColor) {
        color.setBackground(theColor);
        for (int i = 0; i < getTabs().size(); i++) {
            ((Tab) getTabs().get(i)).setColorBackground(theColor);
        }
        return this;
    }

    /**
     * set the color of the text label of the controller.
     */
    public ControlWindow setColorLabel(int theColor) {
        color.setCaptionLabel(theColor);
        for (int i = 0; i < getTabs().size(); i++) {
            ((Tab) getTabs().get(i)).setColorLabel(theColor);
        }
        return this;
    }

    /**
     * set the color of the values.
     */
    public ControlWindow setColorValue(int theColor) {
        color.setValueLabel(theColor);
        for (int i = 0; i < getTabs().size(); i++) {
            ((Tab) getTabs().get(i)).setColorValue(theColor);
        }
        return this;
    }

    /**
     * set the background color of the control window.
     */
    public ControlWindow setBackground(int theValue) {
        background = theValue;
        return this;
    }

    /**
     * get the papplet instance of the ControlWindow.
     */
    public PApplet papplet() {
        return _myApplet;
    }

    /**
     * sets the frame rate of the control window.
     *
     * @param theFrameRate
     * @return ControlWindow
     */
    public ControlWindow frameRate(int theFrameRate) {
        _myApplet.frameRate(theFrameRate);
        return this;
    }

    public ControlWindow show() {
        isVisible = true;
        return this;
    }

    /**
     * by default the background of a controlWindow is filled with a background color every frame. to enable or disable the background from
     * drawing, use setDrawBackground(true/false).
     *
     * @param theFlag
     * @return ControlWindow
     */
    public ControlWindow setDrawBackground(boolean theFlag) {
        isDrawBackground = theFlag;
        return this;
    }

    public boolean isDrawBackground() {
        return isDrawBackground;
    }

    public boolean isVisible() {
        return isVisible;
    }

    protected boolean isControllerActive(Controller<?> theController) {
        if (isControllerActive == null) {
            return false;
        }
        return isControllerActive.equals(theController);
    }

    protected ControlWindow setControllerActive(Controller<?> theController) {
        isControllerActive = theController;
        return this;
    }

    public ControlWindow toggleUndecorated() {
        setUndecorated(!isUndecorated());
        return this;
    }

    // toggles the AWT frame decoration; the remove/addNotify dance is required
    // to change the decoration of an already-displayable frame.
    public ControlWindow setUndecorated(boolean theFlag) {
        if (theFlag != isUndecorated()) {
            isUndecorated = theFlag;
            _myApplet.frame.removeNotify();
            _myApplet.frame.setUndecorated(isUndecorated);
            _myApplet.setSize(_myApplet.width, _myApplet.height);
            _myApplet.setBounds(0, 0, _myApplet.width, _myApplet.height);
            _myApplet.frame.setSize(_myApplet.width, _myApplet.height);
            _myApplet.frame.addNotify();
        }
        return this;
    }

    public boolean isUndecorated() {
        return isUndecorated;
    }

    public ControlWindow setPosition(int theX, int theY) {
        return setLocation(theX, theY);
    }

    public ControlWindow setLocation(int theX, int theY) {
        _myApplet.frame.setLocation(theX, theY);
        return this;
    }

    public Pointer getPointer() {
        return _myPointer;
    }

    public ControlWindow disablePointer() {
        _myPointer.disable();
        return this;
    }

    public ControlWindow enablePointer() {
        _myPointer.enable();
        return this;
    }

    /**
     * A pointer by default is linked to the mouse and stores the x and y position as well as the pressed and released state. The pointer
     * can be accessed by its getter method {@link ControlWindow#getPointer()}. Then use {@link controlP5.ControlWindow#set(int, int)} to
     * alter its position or invoke { {@link controlP5.ControlWindow#pressed()} or {@link controlP5.ControlWindow#released()} to change its
     * state. To disable the mouse and enable the Pointer use {@link controlP5.ControlWindow#enable()} and
     * {@link controlP5.ControlWindow#disable()} to default back to the mouse as input parameter.
     */
    public class Pointer {

        public Pointer setX(int theX) {
            mouseX = theX;
            return this;
        }

        public Pointer setY(int theY) {
            mouseY = theY;
            return this;
        }

        public int getY() {
            return mouseY;
        }

        public int getX() {
            return mouseX;
        }

        public int getPreviousX() {
            return pmouseX;
        }

        public int getPreviousY() {
            return pmouseY;
        }

        public Pointer set(int theX, int theY) {
            setX(theX);
            setY(theY);
            return this;
        }

        // updates the position and converts the continuous pressed flag into
        // press/release edge events.
        public Pointer set(int theX, int theY, boolean pressed) {
            setX(theX);
            setY(theY);
            if (pressed) {
                if (!mousePressed) {
                    pressed();
                }
            } else {
                if (mousePressed) {
                    released();
                }
            }
            return this;
        }

        public Pointer pressed() {
            mousePressedEvent();
            return this;
        }

        public Pointer released() {
            mouseReleasedEvent();
            return this;
        }

        // note the inversion: enabling the Pointer disables direct mouse input
        public void enable() {
            isMouse = false;
        }

        public void disable() {
            isMouse = true;
        }

        public boolean isEnabled() {
            return !isMouse;
        }
    }

    /**
     * hide the controllers and tabs of the ControlWindow.
     */
    public ControlWindow hide() {
        isVisible = false;
        isMouseOver = false;
        return this;
    }

    /**
     * @exclude
     * @deprecated
     */
    @Deprecated public ControllerList tabs() {
        return _myTabs;
    }

    /**
     * @exclude
     * @deprecated
     */
    @Deprecated public Tab tab(String theTabName) {
        return cp5.getTab(this, theTabName);
    }

    /**
     * @deprecated
     * @exclude
     */
    @Deprecated public Tab currentTab() {
        for (int i = 1; i < _myTabs.size(); i++) {
            if (((Tab) _myTabs.get(i)).isActive()) {
                return (Tab) _myTabs.get(i);
            }
        }
        return null;
    }

    /**
     * @exclude
     * @deprecated
     * @param theMode
     */
    @Deprecated public void setMode(int theMode) {
        setUpdateMode(theMode);
    }

    /**
     * @deprecated
     * @exclude
     * @param theCoordinates
     */
    @ControlP5.Invisible public void multitouch(int[][] theCoordinates) {
        // removed
    }

    /**
     * @deprecated enable smooth controlWindow rendering.
     */
    public ControlWindow smooth() {
        return this;
    }

    /**
     * @deprecated disable smooth controlWindow rendering.
     */
    public ControlWindow noSmooth() {
        return this;
    }

    /**
     * @deprecated set the title of a control window. only applies to control windows of type PAppletWindow.
     */
    public ControlWindow setTitle(String theTitle) {
        return this;
    }

    /**
     * @deprecated shows the xy coordinates displayed in the title of a control window. only applies to control windows of type
     *             PAppletWindow.
     *
     * @param theFlag
     */
    public ControlWindow showCoordinates() {
        return this;
    }

    /**
     * @deprecated hide the xy coordinates displayed in the title of a control window. only applies to control windows of type
     *             PAppletWindow.
     *
     * @param theFlag
     */
    public ControlWindow hideCoordinates() {
        return this;
    }

    /**
     * @deprecated set the draw mode of a control window. a separate control window is only updated when in focus. to update the context of
     *             the window continuously, use yourControlWindow.setUpdateMode(ControlWindow.NORMAL); otherwise use
     *             yourControlWindow.setUpdateMode(ControlWindow.ECONOMIC); for an economic, less cpu intensive update.
     *
     * @param theMode
     */
    public ControlWindow setUpdateMode(int theMode) {
        return this;
    }

    @Deprecated public ControlWindow disableMouseWheel() {
        return this;
    }

    @Deprecated public ControlWindow enableMouseWheel() {
        return this;
    }

    @Deprecated public boolean isMouseWheel() {
        return false;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import java.util.List;
import java.util.Objects;

import org.apache.geode.annotations.Immutable;
import org.apache.geode.cache.GemFireCache;
import org.apache.geode.internal.cache.xmlcache.CacheServerCreation;
import org.apache.geode.pdx.PdxSerializer;
import org.apache.geode.pdx.ReflectionBasedAutoSerializer;
import org.apache.geode.security.PostProcessor;
import org.apache.geode.security.SecurityManager;
/**
* This is helper class used by CacheFactory to pass the cache configuration values to cache
* creation code.
*
* @since GemFire 6.6
*/
public class CacheConfig {
  public static final boolean DEFAULT_PDX_READ_SERIALIZED = false;
  @Immutable
  public static final PdxSerializer DEFAULT_PDX_SERIALIZER = null;
  public static final String DEFAULT_PDX_DISK_STORE = null;
  public static final boolean DEFAULT_PDX_PERSISTENT = false;
  public static final boolean DEFAULT_PDX_IGNORE_UNREAD_FIELDS = false;

  private SecurityManager securityManager = null;
  private PostProcessor postProcessor = null;

  public boolean pdxReadSerialized = DEFAULT_PDX_READ_SERIALIZED;

  /**
   * cacheXMLDescription is used to reinitialize the cache after a reconnect. It overrides any
   * cache.xml filename setting in distributed system properties.
   */
  private String cacheXMLDescription = null;

  /**
   * list of cache servers to create after auto-reconnect if cluster configuration is being used
   */
  private List<CacheServerCreation> cacheServerCreation;

  /**
   * This indicates if the pdxReadSerialized value is set by user. This is used during cache xml
   * parsing. The value set by user api overrides the value set in cache.xml value.
   */
  public boolean pdxReadSerializedUserSet = false;

  public PdxSerializer pdxSerializer = DEFAULT_PDX_SERIALIZER;
  // true once the user explicitly set pdxSerializer via the api (see pdxReadSerializedUserSet)
  public boolean pdxSerializerUserSet = false;
  public String pdxDiskStore = DEFAULT_PDX_DISK_STORE;
  // true once the user explicitly set pdxDiskStore via the api
  public boolean pdxDiskStoreUserSet = false;
  public boolean pdxPersistent = DEFAULT_PDX_PERSISTENT;
  // true once the user explicitly set pdxPersistent via the api
  public boolean pdxPersistentUserSet = false;
  public boolean pdxIgnoreUnreadFields = DEFAULT_PDX_IGNORE_UNREAD_FIELDS;
  // true once the user explicitly set pdxIgnoreUnreadFields via the api
  public boolean pdxIgnoreUnreadFieldsUserSet = false;

  public boolean isPdxReadSerialized() {
    return pdxReadSerialized;
  }

  /** Sets pdxReadSerialized and marks it as explicitly user-set. */
  public void setPdxReadSerialized(boolean pdxReadSerialized) {
    this.pdxReadSerializedUserSet = true;
    this.pdxReadSerialized = pdxReadSerialized;
  }

  public PdxSerializer getPdxSerializer() {
    return pdxSerializer;
  }

  public SecurityManager getSecurityManager() {
    return securityManager;
  }

  public void setSecurityManager(SecurityManager securityManager) {
    this.securityManager = securityManager;
  }

  /** Sets the pdx serializer and marks it as explicitly user-set. */
  public void setPdxSerializer(PdxSerializer pdxSerializer) {
    pdxSerializerUserSet = true;
    this.pdxSerializer = pdxSerializer;
  }

  public PostProcessor getPostProcessor() {
    return postProcessor;
  }

  public void setPostProcessor(PostProcessor postProcessor) {
    this.postProcessor = postProcessor;
  }

  public String getPdxDiskStore() {
    return pdxDiskStore;
  }

  /** Sets the pdx disk store name and marks it as explicitly user-set. */
  public void setPdxDiskStore(String pdxDiskStore) {
    this.pdxDiskStoreUserSet = true;
    this.pdxDiskStore = pdxDiskStore;
  }

  public boolean isPdxPersistent() {
    return pdxPersistent;
  }

  /** Sets pdxPersistent and marks it as explicitly user-set. */
  public void setPdxPersistent(boolean pdxPersistent) {
    this.pdxPersistentUserSet = true;
    this.pdxPersistent = pdxPersistent;
  }

  public boolean getPdxIgnoreUnreadFields() {
    return this.pdxIgnoreUnreadFields;
  }

  /** Sets pdxIgnoreUnreadFields and marks it as explicitly user-set. */
  public void setPdxIgnoreUnreadFields(boolean ignore) {
    this.pdxIgnoreUnreadFields = ignore;
    this.pdxIgnoreUnreadFieldsUserSet = true;
  }

  public String getCacheXMLDescription() {
    return cacheXMLDescription;
  }

  public void setCacheXMLDescription(String cacheXMLDescription) {
    this.cacheXMLDescription = cacheXMLDescription;
  }

  public List<CacheServerCreation> getCacheServerCreation() {
    return this.cacheServerCreation;
  }

  public void setCacheServerCreation(List<CacheServerCreation> servers) {
    this.cacheServerCreation = servers;
  }

  /**
   * Validates this configuration against an existing cache instance and throws
   * {@link IllegalStateException} on any incompatibility.
   *
   * @param cacheInstance the already-created cache to compare against
   * @throws IllegalStateException if an explicitly user-set attribute differs from the existing
   *         cache's configuration
   */
  public void validateCacheConfig(GemFireCache cacheInstance) {
    // To fix bug 44961 only validate our attributes against the existing cache
    // if they have been explicitly set by the set.
    // So all the following "ifs" check that "*UserSet" is true.
    // If they have not then we might use a cache.xml that will specify them.
    // Since we don't have the cache.xml info here we need to only complain
    // if we are sure that we will be incompatible with the existing cache.
    if (this.pdxReadSerializedUserSet
        && this.pdxReadSerialized != cacheInstance.getPdxReadSerialized()) {
      throw incompatibleConfig("pdxReadSerialized", cacheInstance.getPdxReadSerialized());
    }
    if (this.pdxDiskStoreUserSet && !equals(this.pdxDiskStore, cacheInstance.getPdxDiskStore())) {
      throw incompatibleConfig("pdxDiskStore", cacheInstance.getPdxDiskStore());
    }
    if (this.pdxPersistentUserSet && this.pdxPersistent != cacheInstance.getPdxPersistent()) {
      throw incompatibleConfig("pdxPersistent", cacheInstance.getPdxPersistent());
    }
    if (this.pdxIgnoreUnreadFieldsUserSet
        && this.pdxIgnoreUnreadFields != cacheInstance.getPdxIgnoreUnreadFields()) {
      throw incompatibleConfig("pdxIgnoreUnreadFields", cacheInstance.getPdxIgnoreUnreadFields());
    }
    if (this.pdxSerializerUserSet
        && !samePdxSerializer(this.pdxSerializer, cacheInstance.getPdxSerializer())) {
      throw incompatibleConfig("pdxSerializer", cacheInstance.getPdxSerializer());
    }
  }

  /**
   * Builds the standard "existing cache has different configuration" exception for the given
   * attribute/value pair (message text kept identical to the historic inline version).
   */
  private static IllegalStateException incompatibleConfig(String attribute, Object existingValue) {
    return new IllegalStateException(
        String.format("Existing cache has different cache configuration, it has:%s",
            attribute + ": " + existingValue));
  }

  /**
   * Compares two pdx serializers; two {@link ReflectionBasedAutoSerializer}s are considered equal
   * when their managers are equal (fix for bug 44907).
   */
  private boolean samePdxSerializer(PdxSerializer s1, PdxSerializer s2) {
    Object o1 = s1;
    Object o2 = s2;
    if (s1 instanceof ReflectionBasedAutoSerializer
        && s2 instanceof ReflectionBasedAutoSerializer) {
      // Fix for bug 44907.
      o1 = ((ReflectionBasedAutoSerializer) s1).getManager();
      o2 = ((ReflectionBasedAutoSerializer) s2).getManager();
    }
    return equals(o1, o2);
  }

  /** Null-safe equality; delegates to {@link Objects#equals(Object, Object)}. */
  private boolean equals(Object o1, Object o2) {
    return Objects.equals(o1, o2);
  }

  /**
   * Copies pdx attributes from a declarative (cache.xml) configuration, but only for attributes the
   * user has not already set explicitly via the api.
   *
   * @param cacheConfig the declarative configuration to merge in
   */
  public void setDeclarativeConfig(CacheConfig cacheConfig) {
    if (!this.pdxDiskStoreUserSet) {
      this.pdxDiskStore = cacheConfig.getPdxDiskStore();
      this.pdxDiskStoreUserSet = cacheConfig.pdxDiskStoreUserSet;
    }
    if (!this.pdxPersistentUserSet) {
      this.pdxPersistent = cacheConfig.isPdxPersistent();
      this.pdxPersistentUserSet = cacheConfig.pdxPersistentUserSet;
    }
    if (!this.pdxReadSerializedUserSet) {
      this.pdxReadSerialized = cacheConfig.isPdxReadSerialized();
      this.pdxReadSerializedUserSet = cacheConfig.pdxReadSerializedUserSet;
    }
    if (!this.pdxSerializerUserSet) {
      this.pdxSerializer = cacheConfig.getPdxSerializer();
      this.pdxSerializerUserSet = cacheConfig.pdxSerializerUserSet;
    }
    if (!this.pdxIgnoreUnreadFieldsUserSet) {
      this.pdxIgnoreUnreadFields = cacheConfig.getPdxIgnoreUnreadFields();
      this.pdxIgnoreUnreadFieldsUserSet = cacheConfig.pdxIgnoreUnreadFieldsUserSet;
    }
  }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.datafactory.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
/** Azure blobFS read settings. */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type")
@JsonTypeName("AzureBlobFSReadSettings")
@Fluent
public final class AzureBlobFSReadSettings extends StoreReadSettings {
    // NOTE: the previously declared per-instance ClientLogger field was removed —
    // it was never referenced anywhere in this class (dead code allocated on every
    // instantiation). Newer Azure SDK generators omit it from models as well.

    /*
     * If true, files under the folder path will be read recursively. Default
     * is true. Type: boolean (or Expression with resultType boolean).
     */
    @JsonProperty(value = "recursive")
    private Object recursive;

    /*
     * Azure blobFS wildcardFolderPath. Type: string (or Expression with
     * resultType string).
     */
    @JsonProperty(value = "wildcardFolderPath")
    private Object wildcardFolderPath;

    /*
     * Azure blobFS wildcardFileName. Type: string (or Expression with
     * resultType string).
     */
    @JsonProperty(value = "wildcardFileName")
    private Object wildcardFileName;

    /*
     * Point to a text file that lists each file (relative path to the path
     * configured in the dataset) that you want to copy. Type: string (or
     * Expression with resultType string).
     */
    @JsonProperty(value = "fileListPath")
    private Object fileListPath;

    /*
     * Indicates whether to enable partition discovery.
     */
    @JsonProperty(value = "enablePartitionDiscovery")
    private Boolean enablePartitionDiscovery;

    /*
     * Specify the root path where partition discovery starts from. Type:
     * string (or Expression with resultType string).
     */
    @JsonProperty(value = "partitionRootPath")
    private Object partitionRootPath;

    /*
     * Indicates whether the source files need to be deleted after copy
     * completion. Default is false. Type: boolean (or Expression with
     * resultType boolean).
     */
    @JsonProperty(value = "deleteFilesAfterCompletion")
    private Object deleteFilesAfterCompletion;

    /*
     * The start of file's modified datetime. Type: string (or Expression with
     * resultType string).
     */
    @JsonProperty(value = "modifiedDatetimeStart")
    private Object modifiedDatetimeStart;

    /*
     * The end of file's modified datetime. Type: string (or Expression with
     * resultType string).
     */
    @JsonProperty(value = "modifiedDatetimeEnd")
    private Object modifiedDatetimeEnd;

    /**
     * Get the recursive property: If true, files under the folder path will be read recursively. Default is true. Type:
     * boolean (or Expression with resultType boolean).
     *
     * @return the recursive value.
     */
    public Object recursive() {
        return this.recursive;
    }

    /**
     * Set the recursive property: If true, files under the folder path will be read recursively. Default is true. Type:
     * boolean (or Expression with resultType boolean).
     *
     * @param recursive the recursive value to set.
     * @return the AzureBlobFSReadSettings object itself.
     */
    public AzureBlobFSReadSettings withRecursive(Object recursive) {
        this.recursive = recursive;
        return this;
    }

    /**
     * Get the wildcardFolderPath property: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType
     * string).
     *
     * @return the wildcardFolderPath value.
     */
    public Object wildcardFolderPath() {
        return this.wildcardFolderPath;
    }

    /**
     * Set the wildcardFolderPath property: Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType
     * string).
     *
     * @param wildcardFolderPath the wildcardFolderPath value to set.
     * @return the AzureBlobFSReadSettings object itself.
     */
    public AzureBlobFSReadSettings withWildcardFolderPath(Object wildcardFolderPath) {
        this.wildcardFolderPath = wildcardFolderPath;
        return this;
    }

    /**
     * Get the wildcardFileName property: Azure blobFS wildcardFileName. Type: string (or Expression with resultType
     * string).
     *
     * @return the wildcardFileName value.
     */
    public Object wildcardFileName() {
        return this.wildcardFileName;
    }

    /**
     * Set the wildcardFileName property: Azure blobFS wildcardFileName. Type: string (or Expression with resultType
     * string).
     *
     * @param wildcardFileName the wildcardFileName value to set.
     * @return the AzureBlobFSReadSettings object itself.
     */
    public AzureBlobFSReadSettings withWildcardFileName(Object wildcardFileName) {
        this.wildcardFileName = wildcardFileName;
        return this;
    }

    /**
     * Get the fileListPath property: Point to a text file that lists each file (relative path to the path configured in
     * the dataset) that you want to copy. Type: string (or Expression with resultType string).
     *
     * @return the fileListPath value.
     */
    public Object fileListPath() {
        return this.fileListPath;
    }

    /**
     * Set the fileListPath property: Point to a text file that lists each file (relative path to the path configured in
     * the dataset) that you want to copy. Type: string (or Expression with resultType string).
     *
     * @param fileListPath the fileListPath value to set.
     * @return the AzureBlobFSReadSettings object itself.
     */
    public AzureBlobFSReadSettings withFileListPath(Object fileListPath) {
        this.fileListPath = fileListPath;
        return this;
    }

    /**
     * Get the enablePartitionDiscovery property: Indicates whether to enable partition discovery.
     *
     * @return the enablePartitionDiscovery value.
     */
    public Boolean enablePartitionDiscovery() {
        return this.enablePartitionDiscovery;
    }

    /**
     * Set the enablePartitionDiscovery property: Indicates whether to enable partition discovery.
     *
     * @param enablePartitionDiscovery the enablePartitionDiscovery value to set.
     * @return the AzureBlobFSReadSettings object itself.
     */
    public AzureBlobFSReadSettings withEnablePartitionDiscovery(Boolean enablePartitionDiscovery) {
        this.enablePartitionDiscovery = enablePartitionDiscovery;
        return this;
    }

    /**
     * Get the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or
     * Expression with resultType string).
     *
     * @return the partitionRootPath value.
     */
    public Object partitionRootPath() {
        return this.partitionRootPath;
    }

    /**
     * Set the partitionRootPath property: Specify the root path where partition discovery starts from. Type: string (or
     * Expression with resultType string).
     *
     * @param partitionRootPath the partitionRootPath value to set.
     * @return the AzureBlobFSReadSettings object itself.
     */
    public AzureBlobFSReadSettings withPartitionRootPath(Object partitionRootPath) {
        this.partitionRootPath = partitionRootPath;
        return this;
    }

    /**
     * Get the deleteFilesAfterCompletion property: Indicates whether the source files need to be deleted after copy
     * completion. Default is false. Type: boolean (or Expression with resultType boolean).
     *
     * @return the deleteFilesAfterCompletion value.
     */
    public Object deleteFilesAfterCompletion() {
        return this.deleteFilesAfterCompletion;
    }

    /**
     * Set the deleteFilesAfterCompletion property: Indicates whether the source files need to be deleted after copy
     * completion. Default is false. Type: boolean (or Expression with resultType boolean).
     *
     * @param deleteFilesAfterCompletion the deleteFilesAfterCompletion value to set.
     * @return the AzureBlobFSReadSettings object itself.
     */
    public AzureBlobFSReadSettings withDeleteFilesAfterCompletion(Object deleteFilesAfterCompletion) {
        this.deleteFilesAfterCompletion = deleteFilesAfterCompletion;
        return this;
    }

    /**
     * Get the modifiedDatetimeStart property: The start of file's modified datetime. Type: string (or Expression with
     * resultType string).
     *
     * @return the modifiedDatetimeStart value.
     */
    public Object modifiedDatetimeStart() {
        return this.modifiedDatetimeStart;
    }

    /**
     * Set the modifiedDatetimeStart property: The start of file's modified datetime. Type: string (or Expression with
     * resultType string).
     *
     * @param modifiedDatetimeStart the modifiedDatetimeStart value to set.
     * @return the AzureBlobFSReadSettings object itself.
     */
    public AzureBlobFSReadSettings withModifiedDatetimeStart(Object modifiedDatetimeStart) {
        this.modifiedDatetimeStart = modifiedDatetimeStart;
        return this;
    }

    /**
     * Get the modifiedDatetimeEnd property: The end of file's modified datetime. Type: string (or Expression with
     * resultType string).
     *
     * @return the modifiedDatetimeEnd value.
     */
    public Object modifiedDatetimeEnd() {
        return this.modifiedDatetimeEnd;
    }

    /**
     * Set the modifiedDatetimeEnd property: The end of file's modified datetime. Type: string (or Expression with
     * resultType string).
     *
     * @param modifiedDatetimeEnd the modifiedDatetimeEnd value to set.
     * @return the AzureBlobFSReadSettings object itself.
     */
    public AzureBlobFSReadSettings withModifiedDatetimeEnd(Object modifiedDatetimeEnd) {
        this.modifiedDatetimeEnd = modifiedDatetimeEnd;
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public AzureBlobFSReadSettings withMaxConcurrentConnections(Object maxConcurrentConnections) {
        super.withMaxConcurrentConnections(maxConcurrentConnections);
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public AzureBlobFSReadSettings withDisableMetricsCollection(Object disableMetricsCollection) {
        super.withDisableMetricsCollection(disableMetricsCollection);
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    @Override
    public void validate() {
        super.validate();
    }
}
| |
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.proxetta;
import jodd.proxetta.data.*;
import jodd.proxetta.impl.ProxyProxetta;
import jodd.proxetta.impl.ProxyProxettaBuilder;
import jodd.proxetta.pointcuts.AllMethodsPointcut;
import jodd.util.StringUtil;
import org.junit.Test;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.junit.Assert.*;
/**
 * Tests for Proxetta subclass proxy generation: method visibility on generated
 * proxies, proxy class naming (suffix / variable name / explicit target name),
 * and proxying of JDK classes via an explicit non-java package name.
 */
public class SubclassTest {

    @Test
    public void test1() {
        // Aspect whose pointcut matches every method.
        ProxyAspect a1 = new ProxyAspect(FooProxyAdvice.class, new ProxyPointcut() {
            public boolean apply(MethodInfo methodInfo) {
                return true;
            }
        });

        ProxyProxetta proxyProxetta = ProxyProxetta.withAspects(a1);
        proxyProxetta.setClassNameSuffix("$$$Proxetta");
        ProxyProxettaBuilder pb = proxyProxetta.builder();
        pb.setTarget(Foo.class);
        Foo foo = (Foo) pb.newInstance();

        // Class<?> instead of a raw Class: same behavior, no raw-type warning.
        Class<?> fooProxyClass = foo.getClass();
        assertNotNull(fooProxyClass);

        // Public methods: inherited Object methods plus Foo's public API.
        Method[] methods = fooProxyClass.getMethods();
        assertEquals(12, methods.length);
        try {
            fooProxyClass.getMethod("m1");
        } catch (NoSuchMethodException nsmex) {
            fail(nsmex.toString());
        }

        // Declared methods include the proxy's own generated members as well.
        methods = fooProxyClass.getDeclaredMethods();
        assertEquals(15, methods.length);
        try {
            fooProxyClass.getDeclaredMethod("m2");
        } catch (NoSuchMethodException nsmex) {
            fail(nsmex.toString());
        }
    }

    @Test
    public void testProxyClassNames() {
        ProxyProxetta proxyProxetta = ProxyProxetta.withAspects(new ProxyAspect(FooProxyAdvice.class, new AllMethodsPointcut()));
        proxyProxetta.setVariableClassName(true);

        // Variable class names end with a counter digit; strip it before comparing.
        ProxyProxettaBuilder builder = proxyProxetta.builder();
        builder.setTarget(Foo.class);
        Foo foo = (Foo) builder.newInstance();
        assertNotNull(foo);
        assertEquals(Foo.class.getName() + "$$Proxetta", StringUtil.substring(foo.getClass().getName(), 0, -1));

        builder = proxyProxetta.builder();
        builder.setTarget(Foo.class);
        foo = (Foo) builder.newInstance();
        assertNotNull(foo);
        assertEquals(Foo.class.getName() + "$$Proxetta", StringUtil.substring(foo.getClass().getName(), 0, -1));

        // Custom suffix still gets the variable-name counter appended.
        proxyProxetta.setClassNameSuffix("$$Ppp");
        builder = proxyProxetta.builder();
        builder.setTarget(Foo.class);
        foo = (Foo) builder.newInstance();
        assertNotNull(foo);
        assertEquals(Foo.class.getName() + "$$Ppp", StringUtil.substring(foo.getClass().getName(), 0, -1));

        // Fixed (non-variable) names: ".Name" keeps the target's package.
        proxyProxetta.setClassNameSuffix("$$Proxetta");
        proxyProxetta.setVariableClassName(false);
        builder = proxyProxetta.builder(Foo.class, ".Too");
        foo = (Foo) builder.newInstance();
        assertNotNull(foo);
        assertEquals(Foo.class.getPackage().getName() + ".Too$$Proxetta", foo.getClass().getName());

        // "package." form keeps the target's simple name in the given package.
        builder = proxyProxetta.builder();
        builder.setTarget(Foo.class);
        builder.setTargetProxyClassName("foo.");
        foo = (Foo) builder.newInstance();
        assertNotNull(foo);
        assertEquals("foo.Foo$$Proxetta", foo.getClass().getName());

        // No suffix: the explicit proxy class name is used verbatim.
        proxyProxetta.setClassNameSuffix(null);
        builder = proxyProxetta.builder();
        builder.setTargetProxyClassName("foo.Fff");
        builder.setTarget(Foo.class);
        foo = (Foo) builder.newInstance();
        assertNotNull(foo);
        assertEquals("foo.Fff", foo.getClass().getName());
    }

    @Test
    public void testInnerOverride() {
        ProxyProxetta proxyProxetta = ProxyProxetta.withAspects(new ProxyAspect(FooProxyAdvice.class, new AllMethodsPointcut()));
        ProxyProxettaBuilder builder = proxyProxetta.builder();
        builder.setTarget(Two.class);
        builder.setTargetProxyClassName("foo.");
        Two two = (Two) builder.newInstance();
        assertNotNull(two);
        assertEquals("foo.Two$$Proxetta", two.getClass().getName());
    }

    @Test
    public void testJdk() throws Exception {
        ProxyProxetta proxyProxetta = ProxyProxetta.withAspects(new ProxyAspect(StatCounterAdvice.class, new AllMethodsPointcut()));
        proxyProxetta.setVariableClassName(false);

        // Proxying a java.* class without renaming must fail: the default class
        // loader refuses to define classes in the java package.
        ProxyProxettaBuilder builder = proxyProxetta.builder();
        builder.setTarget(Object.class);
        try {
            builder.define();
            fail("Default class loader should not load java.*");
        } catch (RuntimeException rex) {
            // expected — ignore
        }

        // Renaming into a non-java package makes JDK classes proxyable.
        builder = proxyProxetta.builder();
        builder.setTarget(Object.class);
        builder.setTargetProxyClassName("foo.");
        Object object = builder.newInstance();
        assertNotNull(object);
        assertEquals("foo.Object$$Proxetta", object.getClass().getName());

        System.out.println("----------list");
        StatCounter.counter = 0;
        builder = proxyProxetta.builder(ArrayList.class, "foo.");
        List list = (List) builder.newInstance();
        assertNotNull(list);
        assertEquals("foo.ArrayList$$Proxetta", list.getClass().getName());
        assertEquals(1, StatCounter.counter);
        // Integer.valueOf instead of the deprecated boxing constructor.
        list.add(Integer.valueOf(1));
        // Counter delta depends on how many internal methods the JDK calls.
        assertTrue(StatCounter.counter == 3 || StatCounter.counter == 2);

        System.out.println("----------set");
        builder = proxyProxetta.builder(HashSet.class, "foo.");
        Set set = (Set) builder.newInstance();
        assertNotNull(set);
        assertEquals("foo.HashSet$$Proxetta", set.getClass().getName());
        assertTrue(StatCounter.counter == 4 || StatCounter.counter == 3);
        set.add(Integer.valueOf(1));
        assertTrue(StatCounter.counter == 5 || StatCounter.counter == 4);
    }
}
| |
/*
* Copyright 2015 herd contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finra.herd.tools.common.databridge;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.ContextConfiguration;
import org.finra.herd.core.AbstractCoreTest;
import org.finra.herd.core.Command;
import org.finra.herd.model.dto.ManifestFile;
import org.finra.herd.model.dto.S3FileTransferRequestParamsDto;
import org.finra.herd.model.dto.S3FileTransferResultsDto;
import org.finra.herd.model.dto.UploaderInputManifestDto;
import org.finra.herd.model.jpa.StorageEntity;
import org.finra.herd.model.api.xml.Attribute;
import org.finra.herd.model.api.xml.BusinessObjectData;
import org.finra.herd.model.api.xml.BusinessObjectDataKey;
import org.finra.herd.model.api.xml.StorageFile;
import org.finra.herd.service.S3Service;
import org.finra.herd.tools.common.config.DataBridgeTestSpringModuleConfig;
/**
* This is an abstract base class that provides useful methods for DAO test drivers.
*/
@ContextConfiguration(classes = DataBridgeTestSpringModuleConfig.class, inheritLocations = false)
public abstract class AbstractDataBridgeTest extends AbstractCoreTest
{
// Logger for reporting unexpected errors raised while running data bridge apps.
private static Logger logger = Logger.getLogger(AbstractDataBridgeTest.class);

// Web service connection settings used when building test command lines.
protected static final String WEB_SERVICE_HOSTNAME = "testWebServiceHostname";
protected static final Integer WEB_SERVICE_PORT = 80;
protected static final Integer WEB_SERVICE_HTTPS_PORT = 1234;
protected static final String WEB_SERVICE_HTTPS_USERNAME = "testHttpsUsername";
protected static final String WEB_SERVICE_HTTPS_PASSWORD = "testHttpsPassword";
public static final String HTTP_PROXY_HOST = "testProxyHostname";
public static final Integer HTTP_PROXY_PORT = 80;

// Hard-coded S3 settings for the test bucket (no real AWS credentials).
protected static final String S3_BUCKET_NAME = "testBucket";
protected static final String S3_ACCESS_KEY = "testAccessKey";
protected static final String S3_SECRET_KEY = "testSecretKey";
protected static final String S3_ENDPOINT_US_STANDARD = "s3.amazonaws.com";

// Random suffix appended to partition values so concurrent test runs do not collide.
protected static final String RANDOM_SUFFIX = getRandomSuffix();

// Business object data identity used by the uploader/downloader tests.
protected static final String TEST_NAMESPACE = "APP_A";
protected static final String TEST_BUSINESS_OBJECT_DEFINITION = "NEW_ORDERS";
protected static final String TEST_BUSINESS_OBJECT_FORMAT_USAGE = "PRC";
protected static final String TEST_BUSINESS_OBJECT_FORMAT_FILE_TYPE = "TXT";
protected static final Integer TEST_BUSINESS_OBJECT_FORMAT_VERSION = 0;
protected static final String TEST_BUSINESS_OBJECT_FORMAT_PARTITION_KEY = "PROCESS_DATE";
protected static final String TEST_PARENT_PARTITION_VALUE = "2014-07-09" + RANDOM_SUFFIX;
protected static final String TEST_PARTITION_VALUE = "2014-07-10" + RANDOM_SUFFIX;
protected static final String TEST_SUB_PARTITION_VALUE_1 = "2014-07-11" + RANDOM_SUFFIX;
protected static final String TEST_SUB_PARTITION_VALUE_2 = "2014-07-12" + RANDOM_SUFFIX;
protected static final String TEST_SUB_PARTITION_VALUE_3 = "2014-07-13" + RANDOM_SUFFIX;
protected static final String TEST_SUB_PARTITION_VALUE_4 = "2014-07-14" + RANDOM_SUFFIX;
protected static final List<String> TEST_SUB_PARTITION_VALUES =
Arrays.asList(TEST_SUB_PARTITION_VALUE_1, TEST_SUB_PARTITION_VALUE_2, TEST_SUB_PARTITION_VALUE_3, TEST_SUB_PARTITION_VALUE_4);
protected static final Integer TEST_DATA_VERSION_V0 = 0;
protected static final Integer TEST_DATA_VERSION_V1 = 1;

// Local test data files; mixed case and subfolders exercise case/path handling.
protected static final List<String> LOCAL_FILES =
Arrays.asList("foo1.dat", "Foo2.dat", "FOO3.DAT", "folder/foo3.dat", "folder/foo2.dat", "folder/foo1.dat");
protected static final String LOCAL_FILE = "foo.dat";
protected static final List<String> S3_DIRECTORY_MARKERS = Arrays.asList("", "folder");

// Attribute fixtures; note ATTRIBUTE_VALUE_2 deliberately carries surrounding whitespace.
protected static final String ATTRIBUTE_NAME_1_MIXED_CASE = "Attribute Name 1";
protected static final String ATTRIBUTE_VALUE_1 = "Attribute Value 1";
protected static final String ATTRIBUTE_NAME_2_MIXED_CASE = "Attribute Name 2";
protected static final String ATTRIBUTE_VALUE_2 = "   Attribute Value 2  ";
protected static final String ATTRIBUTE_NAME_3_MIXED_CASE = "Attribute Name 3";
protected static final String BLANK_TEXT = "      \n  \t\t ";
protected static final String NAMESPACE_CD = "UT_Namespace" + RANDOM_SUFFIX;
protected static final String STRING_VALUE = "UT_SomeText" + RANDOM_SUFFIX;

// Shared manifest list, lazily built once by setupEnv() for the whole test run.
protected static List<ManifestFile> testManifestFiles;

// Expected S3 key prefixes for parent/child data at versions 0 and 1.
protected static final String S3_TEST_PARENT_PATH_V0 =
"app-a/exchange-a/prc/txt/new-orders/frmt-v0/data-v" + TEST_DATA_VERSION_V0 + "/process-date=" + TEST_PARENT_PARTITION_VALUE + "/spk1=" +
TEST_SUB_PARTITION_VALUE_1 + "/spk2=" + TEST_SUB_PARTITION_VALUE_2 + "/spk3=" + TEST_SUB_PARTITION_VALUE_3 + "/spk4=" + TEST_SUB_PARTITION_VALUE_4;
protected static final String S3_TEST_PARENT_PATH_V1 =
"app-a/exchange-a/prc/txt/new-orders/frmt-v0/data-v" + TEST_DATA_VERSION_V1 + "/process-date=" + TEST_PARENT_PARTITION_VALUE + "/spk1=" +
TEST_SUB_PARTITION_VALUE_1 + "/spk2=" + TEST_SUB_PARTITION_VALUE_2 + "/spk3=" + TEST_SUB_PARTITION_VALUE_3 + "/spk4=" + TEST_SUB_PARTITION_VALUE_4;
protected static final String S3_TEST_PATH_V0 =
"app-a/exchange-a/prc/txt/new-orders/frmt-v0/data-v" + TEST_DATA_VERSION_V0 + "/process-date=" + TEST_PARTITION_VALUE + "/spk1=" +
TEST_SUB_PARTITION_VALUE_1 +
"/spk2=" + TEST_SUB_PARTITION_VALUE_2 + "/spk3=" + TEST_SUB_PARTITION_VALUE_3 + "/spk4=" + TEST_SUB_PARTITION_VALUE_4;
protected static final String S3_TEST_PATH_V1 =
"app-a/exchange-a/prc/txt/new-orders/frmt-v0/data-v" + TEST_DATA_VERSION_V1 + "/process-date=" + TEST_PARTITION_VALUE + "/spk1=" +
TEST_SUB_PARTITION_VALUE_1 +
"/spk2=" + TEST_SUB_PARTITION_VALUE_2 + "/spk3=" + TEST_SUB_PARTITION_VALUE_3 + "/spk4=" + TEST_SUB_PARTITION_VALUE_4;
protected static final String S3_SIMPLE_TEST_PATH = "app-a/exchange-a/prc/txt/new-orders/frmt-v0/data-v0/process-date=2014-01-31";

/**
 * The counter value to generate ID values unique across multiple threads.
 */
protected AtomicInteger counter = new AtomicInteger(0);

// Local temp directories used as upload source (input) and download target (output).
protected static final Path LOCAL_TEMP_PATH_INPUT = Paths.get(System.getProperty("java.io.tmpdir"), "herd-databridge-test", "input");
protected static final Path LOCAL_TEMP_PATH_OUTPUT = Paths.get(System.getProperty("java.io.tmpdir"), "herd-databridge-test", "output");

// Spring context, injected so subclasses can look up additional beans.
@Autowired
protected ApplicationContext applicationContext;

/**
 * Provide easy access to the S3Service for all test methods.
 */
@Autowired
protected S3Service s3Service;
/**
 * Sets up the test environment: builds the shared manifest list on first use and
 * creates the local input/output temp directories.
 *
 * @throws IOException if a local temp directory cannot be created.
 */
@Before
public void setupEnv() throws IOException
{
    // Lazily build the shared manifest list once for the whole test run
    // (tests run single-threaded, so the unsynchronized lazy init is safe here).
    if (testManifestFiles == null)
    {
        testManifestFiles = getManifestFilesFromFileNames(LOCAL_FILES, FILE_SIZE_1_KB);
    }
    // Create local temp directories. Files.createDirectories throws an IOException
    // on failure, unlike File.mkdirs() whose boolean result was silently ignored
    // before; a failure here now surfaces immediately instead of as a confusing
    // downstream file-not-found error. (No-op if the directories already exist.)
    java.nio.file.Files.createDirectories(LOCAL_TEMP_PATH_INPUT);
    java.nio.file.Files.createDirectories(LOCAL_TEMP_PATH_OUTPUT);
}
/**
 * Builds one manifest entry per given file name.
 *
 * @param fileNames the list of file names.
 * @param fileSizeBytes the file size in bytes recorded on every manifest entry.
 *
 * @return the manifest entries, in the same order as the input names; each entry's
 *         row count equals its position in the input list.
 */
protected List<ManifestFile> getManifestFilesFromFileNames(List<String> fileNames, long fileSizeBytes)
{
    List<ManifestFile> manifestFiles = new ArrayList<>(fileNames.size());
    long rowCount = 0;
    for (String fileName : fileNames)
    {
        ManifestFile manifestFile = new ManifestFile();
        manifestFile.setFileName(fileName);
        // Row count mirrors the entry's index in the input list.
        manifestFile.setRowCount(rowCount++);
        manifestFile.setFileSizeBytes(fileSizeBytes);
        manifestFiles.add(manifestFile);
    }
    return manifestFiles;
}
/**
 * Cleans up the test environment after each test: removes the local temp
 * directories and deletes all test data from the destination S3 key prefixes.
 *
 * @throws IOException if a local temp directory cannot be deleted.
 */
@After
public void cleanEnv() throws IOException
{
    // Clean up the local directory.
    FileUtils.deleteDirectory(LOCAL_TEMP_PATH_INPUT.toFile());
    FileUtils.deleteDirectory(LOCAL_TEMP_PATH_OUTPUT.toFile());
    // Clean up the destination S3 folders.
    cleanupS3();
}
/**
 * Returns an S3 file transfer request parameters DTO instance initialized using hard coded test values. This DTO is required for testing and clean up
 * activities.
 *
 * @return the newly created S3 file transfer request parameters DTO
 */
protected S3FileTransferRequestParamsDto getTestS3FileTransferRequestParamsDto()
{
    // Delegate to the overload with no S3 key prefix; callers set one later as needed.
    return getTestS3FileTransferRequestParamsDto(null);
}
/**
 * Returns an S3 file transfer request parameters DTO instance initialized using hard coded test values. This DTO is required for testing and clean up
 * activities.
 *
 * @param s3KeyPrefix the S3 key prefix, may be null
 *
 * @return the newly created S3 file transfer request parameters DTO
 */
protected S3FileTransferRequestParamsDto getTestS3FileTransferRequestParamsDto(String s3KeyPrefix)
{
    // Wire the hard-coded test bucket/credentials/proxy and use the local input temp
    // directory as the transfer's local path.
    return S3FileTransferRequestParamsDto.builder().s3BucketName(S3_BUCKET_NAME).s3KeyPrefix(s3KeyPrefix).s3AccessKey(S3_ACCESS_KEY)
    .s3SecretKey(S3_SECRET_KEY).httpProxyHost(HTTP_PROXY_HOST).httpProxyPort(HTTP_PROXY_PORT).localPath(LOCAL_TEMP_PATH_INPUT.toString()).build();
}
/**
 * Runs a data bridge application (i.e. uploader or downloader) with the specified arguments and validates the response against an expected return value. An
 * optional "no logging class" can also be specified.
 *
 * @param dataBridgeApp the Data Bridge application.
 * @param args the application arguments.
 * @param noLoggingClass an optional class that will have logging turned off.
 * @param expectedReturnValue the expected application return value.
 *
 * @throws Exception if any errors were found during the execution of the application.
 */
protected void runDataBridgeAndCheckReturnValue(DataBridgeApp dataBridgeApp, String[] args, Class<?> noLoggingClass,
DataBridgeApp.ReturnValue expectedReturnValue) throws Exception
{
    // Delegate with a null expected exception: the run must complete normally.
    runDataBridgeAndCheckReturnValue(dataBridgeApp, args, noLoggingClass, expectedReturnValue, null);
}
/**
 * Runs a data bridge application (i.e. uploader or downloader) with the specified arguments and verifies that an expected exception will be thrown. An
 * optional "no logging class" can also be specified.
 *
 * @param dataBridgeApp the Data Bridge application.
 * @param args the application arguments.
 * @param noLoggingClass an optional class that will have logging turned off.
 * @param expectedException an instance of an expected exception that should be thrown. If this is an instance of HttpErrorResponseException, then the
 * response status will also be compared.
 *
 * @throws Exception if any errors were found during the execution of the application.
 */
protected void runDataBridgeAndCheckReturnValue(DataBridgeApp dataBridgeApp, String[] args, Class<?> noLoggingClass, Object expectedException)
throws Exception
{
    // Delegate with a null expected return value: the run must fail with the
    // given exception type instead of completing.
    runDataBridgeAndCheckReturnValue(dataBridgeApp, args, noLoggingClass, null, expectedException);
}
/**
 * Runs a data bridge application (i.e. uploader or downloader) with the specified arguments and validates the response against an expected return value. An
 * optional "no logging class" can also be specified.
 *
 * @param dataBridgeApp the Data Bridge application.
 * @param args the application arguments.
 * @param noLoggingClass an optional class that will have logging turned off.
 * @param expectedReturnValue the expected application return value or null if an exception is expected.
 * @param expectedException an instance of an expected exception that should be thrown or null if no exception is expected. If this is null, then an
 * expected return value should be populated. If this is an instance of HttpErrorResponseException, then the response status will also be compared.
 *
 * @throws Exception if any errors were found during the execution of the application.
 */
private void runDataBridgeAndCheckReturnValue(final DataBridgeApp dataBridgeApp, final String[] args, Class<?> noLoggingClass,
final DataBridgeApp.ReturnValue expectedReturnValue, final Object expectedException) throws Exception
{
    try
    {
        // Run the app with logging suppressed for the given class (if any).
        executeWithoutLogging(noLoggingClass, new Command()
        {
            @Override
            public void execute() throws Exception
            {
                DataBridgeApp.ReturnValue returnValue = dataBridgeApp.go(args);
                // Reaching this point means the app completed normally; that is a
                // failure when the caller expected an exception.
                if (expectedException != null)
                {
                    fail("Expected exception of class " + expectedException.getClass().getName() + " that was not thrown.");
                }
                else
                {
                    // Check both the enum value and its numeric return code.
                    assertEquals(expectedReturnValue, returnValue);
                    assertEquals(expectedReturnValue.getReturnCode(), returnValue.getReturnCode());
                }
            }
        });
    }
    catch (Exception ex)
    {
        if (expectedException != null)
        {
            // Exact class match required — a subclass of the expected type fails.
            if (!(ex.getClass().equals(expectedException.getClass())))
            {
                logger.error("Error running Data Bridge.", ex);
                fail("Expected exception with class " + expectedException.getClass().getName() + ", but got an exception with class " +
                ex.getClass().getName());
            }
            if (ex instanceof HttpErrorResponseException)
            {
                // This will ensure the returned status code matches what we are expecting.
                // NOTE(review): relies on HttpErrorResponseException.equals comparing the
                // status code — confirm in that class.
                HttpErrorResponseException httpErrorResponseException = (HttpErrorResponseException) ex;
                HttpErrorResponseException expectedHttpErrorResponseException = (HttpErrorResponseException) expectedException;
                assertTrue("Expecting HTTP response status of " + expectedHttpErrorResponseException.getStatusCode() + ", but got " +
                httpErrorResponseException.getStatusCode(), expectedException.equals(httpErrorResponseException));
            }
        }
        else
        {
            // Throw the original exception, since we are not expecting any exception.
            throw ex;
        }
    }
}
/**
 * Validates actualBusinessObjectData contents against specified arguments and expected (hard coded) test values.
 *
 * @param expectedDataVersion the expected business object data version
 * @param actualBusinessObjectData the BusinessObjectData object instance to be validated
 */
protected void assertBusinessObjectData(Integer expectedDataVersion, BusinessObjectData actualBusinessObjectData)
{
    // Delegate using the default expected attributes and parents supplied by the
    // test helper methods (presumably defined in a superclass — confirm).
    assertBusinessObjectData(expectedDataVersion, getTestAttributes(), getTestBusinessObjectDataParents(), actualBusinessObjectData);
}
/**
 * Validates actualBusinessObjectData contents against specified arguments and expected (hard coded) test values.
 *
 * @param expectedDataVersion the expected business object data version
 * @param expectedAttributes the expected attributes
 * @param expectedParents the expected business object data parents
 * @param actualBusinessObjectData the BusinessObjectData object instance to be validated
 */
protected void assertBusinessObjectData(Integer expectedDataVersion, List<Attribute> expectedAttributes, List<BusinessObjectDataKey> expectedParents,
BusinessObjectData actualBusinessObjectData)
{
    assertNotNull(actualBusinessObjectData);
    // Identity fields must match the hard-coded test business object format.
    assertEquals(TEST_BUSINESS_OBJECT_DEFINITION, actualBusinessObjectData.getBusinessObjectDefinitionName());
    assertEquals(TEST_BUSINESS_OBJECT_FORMAT_USAGE, actualBusinessObjectData.getBusinessObjectFormatUsage());
    assertEquals(TEST_BUSINESS_OBJECT_FORMAT_FILE_TYPE, actualBusinessObjectData.getBusinessObjectFormatFileType());
    assertEquals(TEST_BUSINESS_OBJECT_FORMAT_VERSION.intValue(), actualBusinessObjectData.getBusinessObjectFormatVersion());
    assertEquals(TEST_BUSINESS_OBJECT_FORMAT_PARTITION_KEY, actualBusinessObjectData.getPartitionKey());
    assertEquals(TEST_PARTITION_VALUE, actualBusinessObjectData.getPartitionValue());
    assertEquals(expectedDataVersion.intValue(), actualBusinessObjectData.getVersion());
    // Exactly one storage unit in managed storage, with one storage file per
    // entry in the shared test manifest.
    assertEquals(1, actualBusinessObjectData.getStorageUnits().size());
    assertEquals(StorageEntity.MANAGED_STORAGE, actualBusinessObjectData.getStorageUnits().get(0).getStorage().getName());
    assertEquals(testManifestFiles.size(), actualBusinessObjectData.getStorageUnits().get(0).getStorageFiles().size());
    // Attributes and parents must match the caller-supplied expectations.
    assertEquals(expectedAttributes, actualBusinessObjectData.getAttributes());
    assertEquals(expectedParents, actualBusinessObjectData.getBusinessObjectDataParents());
}
/**
 * Returns a next ID value to be unique across multiple threads.
 *
 * @return the next ID value (starts at 0 and increments atomically per call)
 */
protected int getNextUniqueIndex()
{
    // AtomicInteger guarantees each caller gets a distinct value even under
    // concurrent access.
    return counter.getAndIncrement();
}
/**
 * Cleans up the destination S3 key prefixes used by the uploader unit tests.
 */
protected void cleanupS3()
{
    // Delete the test business object data versions.
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = getTestS3FileTransferRequestParamsDto();
    // Reuse one DTO across prefixes, updating only the key prefix each iteration.
    for (String s3KeyPrefix : new String[] {S3_TEST_PARENT_PATH_V0, S3_TEST_PARENT_PATH_V1, S3_TEST_PATH_V0, S3_TEST_PATH_V1, S3_SIMPLE_TEST_PATH})
    {
        s3FileTransferRequestParamsDto.setS3KeyPrefix(s3KeyPrefix);
        // List first so we only issue a delete for prefixes that actually
        // contain objects (presumably avoids an error or wasted call on empty
        // prefixes — confirm against S3Service.deleteDirectory semantics).
        if (!s3Service.listDirectory(s3FileTransferRequestParamsDto).isEmpty())
        {
            s3Service.deleteDirectory(s3FileTransferRequestParamsDto);
        }
    }
}
/**
 * Creates local test data files relative to the specified local path.
 *
 * @param localPath the local path relative to which the test data files will be created
 * @param manifestFiles the list of the test data files
 *
 * @throws Exception if a local test data file could not be created
 */
protected void createTestDataFiles(Path localPath, List<ManifestFile> manifestFiles) throws Exception
{
    // Create one local file per manifest entry, named and sized per the manifest.
    for (ManifestFile manifestFile : manifestFiles)
    {
        createLocalFile(localPath.toString(), manifestFile.getFileName(), manifestFile.getFileSizeBytes());
    }
}
/**
 * Creates and uploads to S3 the default set of test data files (testManifestFiles).
 *
 * @param s3KeyPrefix the destination S3 key prefix
 *
 * @throws Exception if the test data files could not be created or uploaded
 */
protected void uploadTestDataFilesToS3(String s3KeyPrefix) throws Exception
{
    uploadTestDataFilesToS3(s3KeyPrefix, testManifestFiles);
}
/**
 * Creates locally and uploads to S3 the specified list of test data files.
 *
 * @param s3KeyPrefix the destination S3 key prefix
 * @param manifestFiles the list of test data files to be created and uploaded to S3
 *
 * @throws Exception if the test data files could not be created or uploaded
 */
protected void uploadTestDataFilesToS3(String s3KeyPrefix, List<ManifestFile> manifestFiles) throws Exception
{
    // No S3 directory markers are requested for this overload.
    uploadTestDataFilesToS3(s3KeyPrefix, manifestFiles, new ArrayList<String>());
}
/**
 * Creates the specified list of files locally and uploads them to the test S3 bucket. This method also creates 0 byte S3 directory markers relative to the
 * S3 key prefix.
 *
 * @param s3KeyPrefix the destination S3 key prefix
 * @param manifestFiles the list of test data files to be created and uploaded to S3
 * @param directoryPaths the list of directory paths to be created in S3 relative to the S3 key prefix
 * <p/>
 * TODO: This method is basically a copy of prepareTestS3Files() from BusinessObjectDataServiceCreateBusinessObjectDataTest.java, so they both should be
 * replaced by a common helper method in AbstractDaoTest.java (a common parent class).
 */
protected void uploadTestDataFilesToS3(String s3KeyPrefix, List<ManifestFile> manifestFiles, List<String> directoryPaths) throws Exception
{
    // Stage the test data files locally before the upload.
    createTestDataFiles(LOCAL_TEMP_PATH_INPUT, manifestFiles);

    // The S3 key prefix denotes a directory, so it gets a trailing '/' character appended.
    String directoryKeyPrefix = s3KeyPrefix + "/";

    // Recursively upload the local staging directory to S3.
    S3FileTransferRequestParamsDto params = getTestS3FileTransferRequestParamsDto();
    params.setS3KeyPrefix(directoryKeyPrefix);
    params.setLocalPath(LOCAL_TEMP_PATH_INPUT.toString());
    params.setRecursive(true);
    S3FileTransferResultsDto transferResults = s3Service.uploadDirectory(params);

    // Every manifest file is expected to have been transferred.
    assertEquals(Long.valueOf(manifestFiles.size()), transferResults.getTotalFilesTransferred());

    // Create a 0 byte directory marker for each requested directory path, relative to the key prefix.
    for (String directoryPath : directoryPaths)
    {
        params.setS3KeyPrefix(s3KeyPrefix + "/" + directoryPath);
        s3Service.createDirectory(params);
    }

    // Point the parameters back at the original key prefix and verify the S3 contents:
    // all uploaded files plus any created directory markers.
    params.setS3KeyPrefix(directoryKeyPrefix);
    List<StorageFile> uploadedS3Files = s3Service.listDirectory(params);
    assertEquals(manifestFiles.size() + directoryPaths.size(), uploadedS3Files.size());
}
/**
 * Serializes provided manifest instance as JSON output, written to a file in the specified directory.
 *
 * @param baseDir the local parent directory path, relative to which the manifest file should be created
 * @param manifest the manifest instance to serialize
 *
 * @return the resulting file
 * @throws IOException if the manifest could not be serialized or the file could not be written
 */
protected File createManifestFile(String baseDir, Object manifest) throws IOException
{
    // Build a unique file name so concurrently running tests do not collide (see getNextUniqueIndex()).
    Path resultFilePath = Paths.get(baseDir, String.format("manifest-%d.json", getNextUniqueIndex()));

    // Path.toFile() is the idiomatic Path -> File conversion (instead of new File(path.toString())).
    File resultFile = resultFilePath.toFile();

    // Convert the Java object to JSON format and write it out.
    ObjectMapper mapper = new ObjectMapper();
    mapper.writeValue(resultFile, manifest);

    return resultFile;
}
/**
 * Returns an instance of the uploader input manifest object initialized per hard coded test values,
 * using the default test partition value and sub-partition values, with parents included.
 *
 * @return the resulting UploaderManifest instance
 */
protected UploaderInputManifestDto getTestUploaderInputManifestDto()
{
    return getTestUploaderInputManifestDto(TEST_PARTITION_VALUE, TEST_SUB_PARTITION_VALUES, true);
}
/**
 * Returns an instance of the uploader input manifest object initialized per hard coded test values.
 *
 * @param partitionValue the partition value to set on the manifest
 * @param subPartitionValues the list of sub-partition values to set on the manifest
 * @param includeParents specifies whether the test business object data parents should be added to the manifest
 *
 * @return the resulting UploaderManifest instance
 */
protected UploaderInputManifestDto getTestUploaderInputManifestDto(String partitionValue, List<String> subPartitionValues, boolean includeParents)
{
    UploaderInputManifestDto manifest = new UploaderInputManifestDto();

    // Populate the business object format identification fields from the hard coded test values.
    manifest.setNamespace(TEST_NAMESPACE);
    manifest.setBusinessObjectDefinitionName(TEST_BUSINESS_OBJECT_DEFINITION);
    manifest.setBusinessObjectFormatUsage(TEST_BUSINESS_OBJECT_FORMAT_USAGE);
    manifest.setBusinessObjectFormatFileType(TEST_BUSINESS_OBJECT_FORMAT_FILE_TYPE);
    manifest.setBusinessObjectFormatVersion(TEST_BUSINESS_OBJECT_FORMAT_VERSION.toString());
    manifest.setPartitionKey(TEST_BUSINESS_OBJECT_FORMAT_PARTITION_KEY);
    manifest.setPartitionValue(partitionValue);
    manifest.setSubPartitionValues(subPartitionValues);
    manifest.setManifestFiles(testManifestFiles);

    // Add attributes to the uploader manifest.
    HashMap<String, String> attributes = new HashMap<>();
    manifest.setAttributes(attributes);
    for (Attribute attribute : getTestAttributes())
    {
        attributes.put(attribute.getName(), attribute.getValue());
    }

    // Add business object data parents.
    if (includeParents)
    {
        manifest.setBusinessObjectDataParents(getTestBusinessObjectDataParents());
    }

    return manifest;
}
/**
 * Builds a list of two business object data attributes populated from hard coded test values.
 *
 * @return the newly created list of business object data attributes
 */
protected List<Attribute> getTestAttributes()
{
    // First test attribute.
    Attribute firstAttribute = new Attribute();
    firstAttribute.setName(ATTRIBUTE_NAME_1_MIXED_CASE);
    firstAttribute.setValue(ATTRIBUTE_VALUE_1);

    // Second test attribute.
    Attribute secondAttribute = new Attribute();
    secondAttribute.setName(ATTRIBUTE_NAME_2_MIXED_CASE);
    secondAttribute.setValue(ATTRIBUTE_VALUE_2);

    List<Attribute> attributes = new ArrayList<>();
    attributes.add(firstAttribute);
    attributes.add(secondAttribute);
    return attributes;
}
/**
 * Builds a list of two business object data parents populated from hard coded test values. The two
 * parents differ only in their business object data version (V0 and V1).
 *
 * @return the newly created list of business object data parents.
 */
protected List<BusinessObjectDataKey> getTestBusinessObjectDataParents()
{
    List<BusinessObjectDataKey> businessObjectDataParents = new ArrayList<>();

    // Parent registered under the initial (V0) data version.
    BusinessObjectDataKey initialVersionParent = new BusinessObjectDataKey();
    initialVersionParent.setNamespace(TEST_NAMESPACE);
    initialVersionParent.setBusinessObjectDefinitionName(TEST_BUSINESS_OBJECT_DEFINITION);
    initialVersionParent.setBusinessObjectFormatUsage(TEST_BUSINESS_OBJECT_FORMAT_USAGE);
    initialVersionParent.setBusinessObjectFormatFileType(TEST_BUSINESS_OBJECT_FORMAT_FILE_TYPE);
    initialVersionParent.setBusinessObjectFormatVersion(TEST_BUSINESS_OBJECT_FORMAT_VERSION);
    initialVersionParent.setPartitionValue(TEST_PARENT_PARTITION_VALUE);
    initialVersionParent.setSubPartitionValues(TEST_SUB_PARTITION_VALUES);
    initialVersionParent.setBusinessObjectDataVersion(TEST_DATA_VERSION_V0);
    businessObjectDataParents.add(initialVersionParent);

    // Parent registered under the next (V1) data version.
    BusinessObjectDataKey nextVersionParent = new BusinessObjectDataKey();
    nextVersionParent.setNamespace(TEST_NAMESPACE);
    nextVersionParent.setBusinessObjectDefinitionName(TEST_BUSINESS_OBJECT_DEFINITION);
    nextVersionParent.setBusinessObjectFormatUsage(TEST_BUSINESS_OBJECT_FORMAT_USAGE);
    nextVersionParent.setBusinessObjectFormatFileType(TEST_BUSINESS_OBJECT_FORMAT_FILE_TYPE);
    nextVersionParent.setBusinessObjectFormatVersion(TEST_BUSINESS_OBJECT_FORMAT_VERSION);
    nextVersionParent.setPartitionValue(TEST_PARENT_PARTITION_VALUE);
    nextVersionParent.setSubPartitionValues(TEST_SUB_PARTITION_VALUES);
    nextVersionParent.setBusinessObjectDataVersion(TEST_DATA_VERSION_V1);
    businessObjectDataParents.add(nextVersionParent);

    return businessObjectDataParents;
}
/**
 * Uploads and registers the versions of the test business object data that will be used as parents.
 *
 * @param dataBridgeWebClient the databridge web client instance
 *
 * @throws Exception if a parent version could not be uploaded or registered
 */
protected void uploadAndRegisterTestDataParents(DataBridgeWebClient dataBridgeWebClient) throws Exception
{
    // Register two versions (V0 and V1) of the parent test business object data.
    uploadAndRegisterTestDataParent(S3_TEST_PARENT_PATH_V0, dataBridgeWebClient);
    uploadAndRegisterTestDataParent(S3_TEST_PARENT_PATH_V1, dataBridgeWebClient);
}
/**
 * Uploads and registers a version of the test business object data that will be used as a parent.
 *
 * @param s3KeyPrefix the destination S3 key prefix that must comply with the S3 naming conventions including the expected data version value
 * @param dataBridgeWebClient the databridge web client instance
 *
 * @throws Exception if the test data could not be uploaded or registered
 */
protected void uploadAndRegisterTestDataParent(String s3KeyPrefix, DataBridgeWebClient dataBridgeWebClient) throws Exception
{
    // Upload the test data files without creating any extra S3 directory markers.
    uploadTestDataFilesToS3(s3KeyPrefix, testManifestFiles, new ArrayList<String>());

    // Register the uploaded data using the parent partition value, without declaring further parents.
    UploaderInputManifestDto uploaderInputManifestDto = getTestUploaderInputManifestDto(TEST_PARENT_PARTITION_VALUE, TEST_SUB_PARTITION_VALUES, false);
    S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = getTestS3FileTransferRequestParamsDto();
    s3FileTransferRequestParamsDto.setS3KeyPrefix(s3KeyPrefix + "/");
    dataBridgeWebClient.registerBusinessObjectData(uploaderInputManifestDto, s3FileTransferRequestParamsDto, StorageEntity.MANAGED_STORAGE, true);

    // Clean up the local input directory used for the test data files upload.
    FileUtils.cleanDirectory(LOCAL_TEMP_PATH_INPUT.toFile());
}
}
| |
package com.nobullet.graph;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Graph vertex. Not thread safe.
 */
class Vertex implements Cloneable {

    /** Unique key of this vertex within a graph. */
    Key key;
    /** Outgoing edges, keyed by their target vertex. */
    Map<Vertex, Edge> adjacent;
    /** Unmodifiable view over {@link #adjacent}, handed out by accessors. */
    Map<Vertex, Edge> adjacentUnmodifiable;
    /** Optional user payload attached to this vertex. */
    Optional<Object> data;
    /** Optional position, used for distance computation. */
    Optional<VertexPosition> position;

    /**
     * Constructs vertex.
     *
     * @param key Vertex unique key.
     */
    Vertex(Key key) {
        this(key, null, null, Collections.emptyMap());
    }

    /**
     * Constructs vertex with given data.
     *
     * @param key Vertex unique key.
     * @param data Vertex data.
     */
    Vertex(Key key, Object data) {
        this(key, null, data, Collections.emptyMap());
    }

    /**
     * Constructs vertex with given position and data.
     *
     * @param key Vertex unique key.
     * @param position Position.
     * @param data Vertex data.
     */
    Vertex(Key key, VertexPosition position, Object data) {
        this(key, position, data, Collections.emptyMap());
    }

    /**
     * Copy constructor. Copies everything but the edges.
     *
     * @param source Source to copy.
     */
    Vertex(Vertex source) {
        // orElse(null) is the idiomatic unwrap-or-null; the previous
        // orElseGet(() -> null) allocated a lambda for no benefit.
        this(source.getKey(), source.getPosition().orElse(null), source.getData().orElse(null));
    }

    /**
     * Constructs vertex with given data.
     *
     * @param key Vertex unique key.
     * @param position Position.
     * @param data Vertex data.
     * @param adjacent Adjacent edges, copied defensively.
     */
    Vertex(Key key, VertexPosition position, Object data, Map<Vertex, Edge> adjacent) {
        this.key = key;
        this.adjacent = new HashMap<>(adjacent);
        this.adjacentUnmodifiable = Collections.unmodifiableMap(this.adjacent);
        this.position = Optional.ofNullable(position);
        this.data = Optional.ofNullable(data);
    }

    /**
     * Collection of outgoing edges.
     *
     * @return Outgoing edges (unmodifiable view).
     */
    Collection<Edge> getOutgoingEdges() {
        return adjacentUnmodifiable.values();
    }

    /**
     * Number of outgoing edges.
     *
     * @return Number of outgoing edges.
     */
    int getOutgoingEdgesNumber() {
        return adjacent.size();
    }

    /**
     * Adjacent vertices.
     *
     * @return Adjacent vertices (unmodifiable view).
     */
    Set<Vertex> getAdjacentVertices() {
        return adjacentUnmodifiable.keySet();
    }

    /**
     * Adjacent vertices keys.
     *
     * @return Adjacent vertices keys.
     */
    Set<Key> getAdjacentVerticesKeys() {
        return adjacentUnmodifiable.keySet().stream().map(Vertex::getKey).collect(Collectors.toSet());
    }

    /**
     * Removes edge to vertex. No-op when there is no such edge.
     *
     * @param to To vertex.
     * @return Current vertex.
     */
    Vertex removeEdgeTo(Vertex to) {
        Edge existing = this.adjacent.get(to);
        if (existing != null) {
            existing.clear();
            this.adjacent.remove(to);
        }
        return this;
    }

    /**
     * Checks if the vertex has edge to other vertex.
     *
     * @param to To vertex.
     * @return Whether the vertex has edge to other vertex.
     */
    boolean hasEdge(Vertex to) {
        return adjacent.containsKey(to);
    }

    /**
     * Returns an edge to other vertex if it exists. Or returns null if it doesn't.
     *
     * @param to To vertex.
     * @return Edge to other vertex or null.
     */
    Edge getEdge(Vertex to) {
        return adjacent.get(to);
    }

    /**
     * Adds edge to other vertex with given cost if there was no edge. Updates cost if edge previously existed.
     *
     * @param to To vertex.
     * @param cost Cost.
     * @return Current vertex.
     */
    Vertex addEdge(Vertex to, double cost) {
        addEdge(to, cost, null);
        return this;
    }

    /**
     * Adds edge to other vertex with given cost if there was no edge. Updates cost if edge previously existed.
     *
     * @param to To vertex.
     * @param cost Cost.
     * @param data Edge data. Is not updated if edge exists.
     * @return Current vertex.
     * @throws IllegalStateException when {@code to} equals this vertex (self-edges are not allowed).
     */
    Vertex addEdge(Vertex to, double cost, Object data) {
        if (equals(to)) {
            throw new IllegalStateException("Can't add an edge to itself.");
        }
        Edge edge = this.adjacent.get(to);
        if (edge == null) {
            edge = new Edge(this, to, cost, data);
            this.adjacent.put(to, edge);
        } else {
            // Existing edge: only the cost is refreshed, data is intentionally kept.
            edge.setCost(cost);
        }
        return this;
    }

    /**
     * Returns vertex key.
     *
     * @return Vertex key.
     */
    Key getKey() {
        return key;
    }

    /**
     * Returns position for vertex.
     *
     * @return Position for vertex.
     */
    Optional<VertexPosition> getPosition() {
        return position;
    }

    /**
     * Sets position for vertex.
     *
     * @param position Position (may be null to clear).
     * @return Current vertex.
     */
    Vertex setPosition(VertexPosition position) {
        this.position = Optional.ofNullable(position);
        return this;
    }

    /**
     * Gets the data for vertex.
     *
     * @return Data for vertex.
     */
    Optional<Object> getData() {
        return data;
    }

    /**
     * Sets the data for vertex.
     *
     * @param data Data (may be null to clear).
     * @return Current vertex.
     */
    Vertex setData(Object data) {
        this.data = Optional.ofNullable(data);
        return this;
    }

    @Override
    public int hashCode() {
        // Identity is defined by the key only, consistent with equals().
        int hash = 7;
        hash = 43 * hash + Objects.hashCode(this.key);
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final Vertex other = (Vertex) obj;
        return Objects.equals(this.key, other.key);
    }

    @Override
    public Object clone() throws CloneNotSupportedException {
        // Delegates to the copy constructor; edges are intentionally not copied.
        return new Vertex(this);
    }

    /**
     * Clears the vertex: drops all outgoing edges, data and position.
     */
    void clear() {
        // Clean up edges first so they release their own references.
        for (Map.Entry<Vertex, Edge> vertexEntry : this.adjacent.entrySet()) {
            vertexEntry.getValue().clear();
        }
        this.adjacent.clear();
        this.data = Optional.empty();
        this.position = Optional.empty();
    }

    /**
     * Calculates a distance to given vertex. Empty when either vertex has no position.
     *
     * @param vertex Other vertex.
     * @return Distance as an {@link Optional} of double.
     */
    Optional<Double> distanceTo(Vertex vertex) {
        if (position.isPresent() && vertex.getPosition().isPresent()) {
            return position.get().distanceTo(vertex.getPosition().get());
        }
        return Optional.empty();
    }

    @Override
    public String toString() {
        return "{key:\"" + key + "\"}";
    }
}
| |
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.waveprotocol.wave.model.document.operation.util;
import org.waveprotocol.wave.model.document.operation.util.ImmutableUpdateMap.AttributeUpdate;
import org.waveprotocol.wave.model.util.Preconditions;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * An immutable name-value state map backed by a list of {@link Attribute} entries kept sorted by
 * name. Subclasses provide the concrete type through {@link #createFromList}.
 */
public abstract class ImmutableStateMap<T extends ImmutableStateMap<T, U>, U extends UpdateMap>
    extends AbstractMap<String, String> {

  /**
   * A name-value pair representing an attribute.
   */
  public static final class Attribute implements Map.Entry<String,String> {

    // TODO: This class can be simplified greatly if
    // AbstractMap.SimpleImmutableEntry from Java 6 can be used.

    private final String name;
    private final String value;

    /**
     * Creates an attribute with a map entry representing an attribute
     * name-value pair.
     *
     * @param entry The attribute's name-value pair.
     */
    public Attribute(Map.Entry<String,String> entry) {
      this(entry.getKey(), entry.getValue());
    }

    /**
     * Creates an attribute given a name-value pair.
     *
     * @param name The name of the attribute.
     * @param value The value of the attribute.
     */
    public Attribute(String name, String value) {
      Preconditions.checkNotNull(name, "Null attribute name");
      Preconditions.checkNotNull(value, "Null attribute value");
      this.name = name;
      this.value = value;
    }

    @Override
    public String getKey() {
      return name;
    }

    @Override
    public String getValue() {
      return value;
    }

    @Override
    public String setValue(String value) {
      throw new UnsupportedOperationException("Attempt to modify an immutable map entry.");
    }

    @Override
    public boolean equals(Object o) {
      // Follows the Map.Entry equals contract so it compares equal to any
      // Map.Entry with the same key and value.
      if (!(o instanceof Map.Entry)) {
        return false;
      }
      Map.Entry<?,?> entry = (Map.Entry<?,?>) o;
      return ((name == null) ? entry.getKey() == null : name.equals(entry.getKey())) &&
          ((value == null) ? entry.getValue() == null : value.equals(entry.getValue()));
    }

    @Override
    public int hashCode() {
      // Map.Entry hashCode contract: key hash XOR value hash.
      return ((name == null) ? 0 : name.hashCode()) ^
          ((value == null) ? 0 : value.hashCode());
    }

    @Override
    public String toString() {
      return "Attribute(" + name + "=" + value + ")";
    }

  }

  // The attribute list, kept sorted by attribute name (see comparator below).
  private final List<Attribute> attributes;

  // Read-only entry set view backed directly by the attributes list.
  private final Set<Map.Entry<String,String>> entrySet =
      new AbstractSet<Map.Entry<String,String>>() {

        @Override
        public Iterator<Map.Entry<String,String>> iterator() {
          return new Iterator<Map.Entry<String,String>>() {

            private Iterator<Attribute> iterator = attributes.iterator();

            public boolean hasNext() {
              return iterator.hasNext();
            }

            public Attribute next() {
              return iterator.next();
            }

            public void remove() {
              throw new UnsupportedOperationException("Attempt to modify an immutable set.");
            }

          };
        }

        @Override
        public int size() {
          return attributes.size();
        }

      };

  /**
   * Creates a new state map containing no attributes.
   */
  public ImmutableStateMap() {
    attributes = Collections.emptyList();
  }

  // Orders attributes by name; defines the sort invariant of the attributes list.
  protected static final Comparator<Attribute> comparator = new Comparator<Attribute>() {
    @Override
    public int compare(Attribute a, Attribute b) {
      return a.name.compareTo(b.name);
    }
  };

  /**
   * Constructs a new <code>T</code> object with the state
   * specified by the given mapping.
   *
   * @param map The mapping of attribute names to attribute values.
   */
  public ImmutableStateMap(Map<String,String> map) {
    this.attributes = attributeListFromMap(map);
  }

  /**
   * Constructs a new instance from alternating key/value arguments.
   * Rejects odd argument counts, nulls, and duplicate keys.
   */
  public ImmutableStateMap(String ... pairs) {
    Preconditions.checkArgument(pairs.length % 2 == 0, "Pairs must come in groups of two");
    Map<String, String> map = new HashMap<String, String>();
    for (int i = 0; i < pairs.length; i += 2) {
      Preconditions.checkNotNull(pairs[i], "Null key");
      Preconditions.checkNotNull(pairs[i + 1], "Null value");
      if (map.containsKey(pairs[i])) {
        Preconditions.illegalArgument("Duplicate key: " + pairs[i]);
      }
      map.put(pairs[i], pairs[i + 1]);
    }
    this.attributes = attributeListFromMap(map);
  }

  // Converts a map into the internal sorted attribute list, rejecting null keys/values.
  private List<Attribute> attributeListFromMap(Map<String, String> map) {
    ArrayList<Attribute> attributeList = new ArrayList<Attribute>(map.size());
    for (Map.Entry<String, String> entry : map.entrySet()) {
      if (entry.getKey() == null || entry.getValue() == null) {
        Preconditions.nullPointer("This map does not allow null keys or values");
      }
      attributeList.add(new Attribute(entry));
    }
    Collections.sort(attributeList, comparator);
    return attributeList;
  }

  // Trusting constructor: the caller must supply an already-sorted list
  // (see checkAttributesSorted); no defensive copy is made.
  protected ImmutableStateMap(List<Attribute> attributes) {
    this.attributes = attributes;
  }

  @Override
  public Set<Map.Entry<String,String>> entrySet() {
    return entrySet;
  }

  /**
   * Returns a <code>T</code> object obtained by applying the update
   * specified by the <code>U</code> object into this
   * <code>T</code> object.
   *
   * @param attributeUpdate The update to apply.
   * @return A <code>T</code> object obtained by applying the given
   *         update onto this object.
   */
  public T updateWith(U attributeUpdate) {
    return updateWith(attributeUpdate, true);
  }

  /**
   * Like {@link #updateWith}, but skips the old-value compatibility checks.
   */
  public T updateWithNoCompatibilityCheck(U attributeUpdate) {
    return updateWith(attributeUpdate, false);
  }

  // Merges the sorted attribute list with the updates. The updates are expected
  // to be sorted by name under the same ordering as the attribute list --
  // NOTE(review): confirm against ImmutableUpdateMap's invariants.
  private T updateWith(U attributeUpdate, boolean checkCompatibility) {
    List<Attribute> newImmutableStateMap = new ArrayList<Attribute>();
    Iterator<Attribute> iterator = attributes.iterator();
    Attribute nextAttribute = iterator.hasNext() ? iterator.next() : null;
    // TODO: Have a slow path when the cast would fail.
    List<AttributeUpdate> updates = ((ImmutableUpdateMap<?,?>) attributeUpdate).updates;
    for (AttributeUpdate update : updates) {
      while (nextAttribute != null) {
        int comparison = update.name.compareTo(nextAttribute.name);
        if (comparison > 0) {
          // Existing attribute precedes this update: keep it unchanged.
          newImmutableStateMap.add(nextAttribute);
          nextAttribute = iterator.hasNext() ? iterator.next() : null;
        } else if (comparison < 0) {
          // The update targets an attribute that is not present; its old value
          // must therefore be null when checking compatibility.
          if (checkCompatibility && update.oldValue != null) {
            Preconditions.illegalArgument(
                "Mismatched old value: attempt to update unset attribute with " + update);
          }
          break;
        } else if (comparison == 0) {
          // The update replaces (or removes) this attribute; its old value must
          // match the current value when checking compatibility.
          if (checkCompatibility && !nextAttribute.value.equals(update.oldValue)) {
            Preconditions.illegalArgument(
                "Mismatched old value: attempt to update " + nextAttribute + " with " + update);
          }
          nextAttribute = iterator.hasNext() ? iterator.next() : null;
          break;
        }
      }
      // A null new value means removal; otherwise the updated attribute is emitted.
      if (update.newValue != null) {
        newImmutableStateMap.add(new Attribute(update.name, update.newValue));
      }
    }
    // Copy over any remaining attributes past the last update.
    if (nextAttribute != null) {
      newImmutableStateMap.add(nextAttribute);
      while (iterator.hasNext()) {
        newImmutableStateMap.add(iterator.next());
      }
    }
    return createFromList(newImmutableStateMap);
  }

  // Factory hook: subclasses wrap the (sorted) attribute list in their concrete type.
  protected abstract T createFromList(List<Attribute> attributes);

  /**
   * Asserts that the given attribute list is strictly sorted by name with no
   * duplicates and no null entries.
   */
  public static void checkAttributesSorted(List<Attribute> attributes) {
    Attribute previous = null;
    for (Attribute a : attributes) {
      Preconditions.checkNotNull(a, "Null attribute");
      assert a.name != null;
      assert a.value != null;
      if (previous != null && previous.name.compareTo(a.name) >= 0) {
        Preconditions.illegalArgument(
            "Attribute keys not strictly monotonic: " + previous.name + ", " + a.name);
      }
      previous = a;
    }
  }

  /**
   * Applies an update to a state map without the old-value compatibility check.
   */
  public static <T extends ImmutableStateMap<T, U>, U extends ImmutableUpdateMap<U, ?>> T
      updateWithoutCompatibilityCheck(T state, U update) {
    // the cast below is required to work with javac from OpenJDK 7
    return ((ImmutableStateMap<T, U>) state).updateWith(update, false);
  }
}
| |
package com.github.markyc.applicationcenter;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.ButtonGroup;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
/**
 * Card panel that lets the user view and edit each student's per-university
 * admission status (accept/reject) and notifies registered listeners on submit.
 */
public class AdmitPanel extends JPanel implements CardPanel {

    private static final long serialVersionUID = -1871018853543598682L;

    /** Name used to identify this card. */
    public static final String CARD_NAME = "admitpanel";

    private static final String SELECT_STUDENT = "Please select a student to view their admission status";
    private static final String SELECT_STUDENT_LIST = "Please select a student from the list above";
    private static final String[] NO_STUDENTS = { "There are no students currently in the database" };

    private static final String ACCEPT = "Accept";
    private static final String REJECT = "Reject";
    private static final String SUBMIT = "Submit";

    /* Combo box index is students index + 1
     * because the first combobox item is "please select a student..." */
    private Student[] students;

    private JComboBox<String> comboBox;
    private JPanel studentInfoPanel;

    /** Maps each accept/reject radio button group to the university it represents. */
    Map<ButtonGroup, String> fields;

    /** Listeners notified whenever admission statuses are submitted. */
    private List<ChangeListener> listeners;

    public AdmitPanel() {
        super();
        this.students = new Student[100];
        this.fields = new HashMap<ButtonGroup, String>();
        this.listeners = new ArrayList<ChangeListener>();

        this.comboBox = new JComboBox<String>(NO_STUDENTS);
        this.comboBox.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                final JComboBox<?> box = (JComboBox<?>) e.getSource();
                // Attempt to show the selected student; fall back to the empty
                // ("please select a student...") panel when the selection is the
                // helper text (index 0) or otherwise invalid.
                try {
                    AdmitPanel.this.showStudent(box.getSelectedIndex() - 1);
                } catch (Exception ex) {
                    AdmitPanel.this.createEmptyInfoPanel();
                }
            }
        });

        this.studentInfoPanel = new JPanel();
        this.createEmptyInfoPanel();

        this.setLayout(new BorderLayout());
        this.add(this.comboBox, BorderLayout.NORTH);
        this.add(this.studentInfoPanel, BorderLayout.CENTER);
    }

    /**
     * Replaces the info panel contents with a centered helper message asking the
     * user to select a student.
     *
     * @return the (reused) student info panel
     */
    private JPanel createEmptyInfoPanel() {
        // Remove all previous Components from the panel.
        this.studentInfoPanel.removeAll();

        this.studentInfoPanel.setLayout(new BoxLayout(this.studentInfoPanel, BoxLayout.X_AXIS));
        this.studentInfoPanel.add(Box.createHorizontalGlue());
        this.studentInfoPanel.add(new JLabel(SELECT_STUDENT_LIST));
        this.studentInfoPanel.add(Box.createHorizontalGlue());

        // Force the window to redraw itself to reflect the new changes.
        this.revalidate();
        this.repaint();

        return this.studentInfoPanel;
    }

    /**
     * Rebuilds the info panel to show the admission status of the student at the
     * given index of the students array.
     *
     * @param index index into the students array
     */
    private void showStudent(int index) {
        final Student student = this.students[index];

        // Discard radio button groups belonging to a previously shown student.
        this.fields.clear();

        // Remove all previous Components from the panel.
        this.studentInfoPanel.removeAll();
        this.studentInfoPanel.setLayout(new BorderLayout());

        /* name and graduate type panel */
        JPanel namePanel = new JPanel();
        namePanel.setLayout(new BoxLayout(namePanel, BoxLayout.X_AXIS));

        // The student's name, in large Font.
        JLabel nameLabel = new JLabel(student.getName());
        nameLabel.setFont(nameLabel.getFont().deriveFont(24f));

        // Shows whether the Student is an Undergrad or Postgrad.
        // If the Student is Postgrad, (Master) or (PHD) will be shown.
        String gradType = student.getClass().getSimpleName();
        if (student instanceof Postgrad) gradType += " (" + ((Postgrad) student).getDegree() + ")";
        JLabel gradLabel = new JLabel(gradType);
        gradLabel.setFont(gradLabel.getFont().deriveFont(16f));
        gradLabel.setForeground(Color.GRAY);

        namePanel.add(nameLabel);
        namePanel.add(Box.createHorizontalStrut(10));
        namePanel.add(gradLabel);

        /* University choice panel */
        JPanel universityPanel = new JPanel();
        universityPanel.setLayout(new GridLayout(3, 1));

        String[] universities = student.getUniversities();
        for (int i = 0; i < universities.length; i++) {
            String university = universities[i];
            boolean accepted = student.getUniversitiesAccept()[i];

            JPanel p = new JPanel();
            p.setBorder(BorderFactory.createTitledBorder(
                    BorderFactory.createEtchedBorder(),
                    university
            ));
            p.setLayout(new GridLayout(2, 1));

            // One accept/reject radio pair per university choice.
            ButtonGroup group = new ButtonGroup();
            JRadioButton accept = new JRadioButton(ACCEPT);
            accept.setActionCommand(ACCEPT);
            JRadioButton reject = new JRadioButton(REJECT);
            reject.setActionCommand(REJECT);
            group.add(accept);
            group.add(reject);
            this.fields.put(group, university);

            // Pre-select the radio button matching the stored admission status.
            if (accepted) accept.setSelected(true);
            else reject.setSelected(true);

            p.add(accept);
            p.add(reject);
            universityPanel.add(p);
        }

        /* submit button */
        JButton submit = new JButton(SUBMIT);
        submit.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) {
                // Persist the accept/reject choice for each university.
                // (A previously present duplicate getActionCommand() call whose
                // result was discarded has been removed.)
                for (Entry<ButtonGroup, String> entry : AdmitPanel.this.fields.entrySet()) {
                    student.addUniversity(entry.getValue(), ACCEPT.equals(entry.getKey().getSelection().getActionCommand()));
                }
                // Notify all registered listeners that admission data changed.
                for (ChangeListener l : AdmitPanel.this.listeners) {
                    l.stateChanged(new ChangeEvent(AdmitPanel.this));
                }
            }
        });

        // Add everything to the panel.
        this.studentInfoPanel.add(namePanel, BorderLayout.NORTH);
        this.studentInfoPanel.add(universityPanel, BorderLayout.CENTER);
        this.studentInfoPanel.add(submit, BorderLayout.SOUTH);

        // Force a redraw of the screen.
        this.revalidate();
        this.repaint();
    }

    @Override
    public String getCardName() {
        return CARD_NAME;
    }

    /**
     * Sets the Students this panel contains.
     *
     * @param students an array of Students who have been inputted into the system
     */
    public void setStudents(Student[] students) {
        // Update students variable.
        this.students = students;

        // Get names of all students and store in a string array.
        // The first combobox item is helper text asking the user to select a Student.
        String[] studentNames = new String[this.students.length + 1];
        studentNames[0] = SELECT_STUDENT;
        for (int i = 1; i <= this.students.length; i++) {
            studentNames[i] = this.students[i - 1].getName();
        }

        // Set model for the combobox to be the students' names.
        this.comboBox.setModel(new DefaultComboBoxModel<String>(studentNames));
    }

    /**
     * @return the array of Students this panel currently holds
     */
    public Student[] getStudents() {
        return this.students;
    }

    /**
     * Registers a listener to be notified when admission statuses are submitted.
     *
     * @param c the listener to add
     */
    public void addListener(ChangeListener c) {
        this.listeners.add(c);
    }
}
| |
package alien4cloud.deployment;
import static alien4cloud.utils.AlienUtils.safe;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Supplier;
import javax.inject.Inject;
import org.alien4cloud.alm.deployment.configuration.flow.FlowExecutionContext;
import org.alien4cloud.alm.deployment.configuration.flow.FlowExecutor;
import org.alien4cloud.alm.deployment.configuration.model.DeploymentInputs;
import org.alien4cloud.alm.deployment.configuration.model.DeploymentMatchingConfiguration;
import org.alien4cloud.alm.deployment.configuration.model.OrchestratorDeploymentProperties;
import org.alien4cloud.alm.deployment.configuration.model.PreconfiguredInputsConfiguration;
import org.alien4cloud.alm.deployment.configuration.model.SecretCredentialInfo;
import org.alien4cloud.alm.service.ServiceResourceService;
import org.alien4cloud.tosca.catalog.index.ICsarDependencyLoader;
import org.alien4cloud.tosca.catalog.index.IToscaTypeSearchService;
import org.alien4cloud.tosca.model.CSARDependency;
import org.alien4cloud.tosca.model.templates.NodeTemplate;
import org.alien4cloud.tosca.model.templates.PolicyTemplate;
import org.alien4cloud.tosca.model.templates.Topology;
import org.alien4cloud.tosca.model.types.NodeType;
import org.alien4cloud.tosca.topology.TopologyDTOBuilder;
import org.springframework.stereotype.Service;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import alien4cloud.deployment.model.DeploymentSubstitutionConfiguration;
import alien4cloud.model.application.Application;
import alien4cloud.model.application.ApplicationEnvironment;
import alien4cloud.model.application.ApplicationTopologyVersion;
import alien4cloud.model.deployment.DeploymentTopology;
import alien4cloud.model.orchestrators.locations.LocationResourceTemplate;
import alien4cloud.model.orchestrators.locations.PolicyLocationResourceTemplate;
import alien4cloud.model.service.ServiceResource;
import alien4cloud.orchestrators.locations.services.ILocationResourceService;
import alien4cloud.orchestrators.locations.services.LocationResourceTypes;
import alien4cloud.topology.TopologyValidationResult;
import alien4cloud.topology.task.AbstractTask;
import alien4cloud.tosca.context.ToscaContext;
import alien4cloud.tosca.context.ToscaContextual;
import alien4cloud.utils.ReflectionUtil;
/**
 * Construct a deployment topology dto for rest api and ui consumption.
 *
 * <p>The DTO is assembled from the outcome of a deployment flow execution: the processed topology,
 * the flow's validation log, deployment inputs, matching/substitution configurations and
 * orchestrator deployment properties found in the flow's execution context.</p>
 */
@Service
public class DeploymentTopologyDTOBuilder implements IDeploymentTopologyBuilder {
    @Inject
    private TopologyDTOBuilder topologyDTOBuilder;
    @Inject
    private FlowExecutor flowExecutor;
    @Inject
    private ILocationResourceService locationResourceService;
    @Inject
    private ServiceResourceService serviceResourceService;
    @Inject
    private IToscaTypeSearchService toscaTypeSearchService;
    @Inject
    private ICsarDependencyLoader csarDependencyLoader;

    /** Executes the full deployment flow for the given topology/application/environment and builds the DTO. */
    @Override
    @ToscaContextual
    public DeploymentTopologyDTO prepareDeployment(Topology topology, Application application, ApplicationEnvironment environment) {
        FlowExecutionContext executionContext = flowExecutor.executeDeploymentFlow(topology, application, environment);
        return build(executionContext);
    }

    /**
     * Executes the pre-deployment flow, applies the given configuration action to the flow's
     * topology, then executes the full deployment flow and builds the resulting DTO.
     */
    @Override
    @ToscaContextual
    public DeploymentTopologyDTO prepareDeployment(Topology topology, Application application, ApplicationEnvironment environment,
            ApplicationTopologyVersion topologyVersion, IDeploymentConfigAction deploymentConfigAction) {
        FlowExecutionContext executionContext = flowExecutor.executePreDeploymentFlow(topology,application,environment);
        // Execute the update
        deploymentConfigAction.execute(application, environment, topologyVersion, executionContext.getTopology());
        // NOTE(review): the deployment flow is re-executed on the original `topology` argument, not on
        // executionContext.getTopology() — confirm the config action's changes are persisted elsewhere.
        executionContext = flowExecutor.executeDeploymentFlow(topology, application, environment);
        return build(executionContext);
    }

    /** Builds the DTO from a flow execution context produced by the caller-supplied supplier. */
    @Override
    @ToscaContextual
    public DeploymentTopologyDTO prepareDeployment(Topology topology, Supplier<FlowExecutionContext> contextSupplier) {
        return build(contextSupplier.get());
    }

    /**
     * Create a deployment topology dto from the context of the execution of a deployment flow.
     *
     * @param executionContext The deployment flow execution context.
     * @return The deployment topology.
     */
    private DeploymentTopologyDTO build(FlowExecutionContext executionContext) {
        // re-create the deployment topology object for api compatibility purpose
        DeploymentTopology deploymentTopology = new DeploymentTopology();
        ReflectionUtil.mergeObject(executionContext.getTopology(), deploymentTopology);
        deploymentTopology.setInitialTopologyId(executionContext.getTopology().getId());
        deploymentTopology.setEnvironmentId(executionContext.getEnvironmentContext().get().getEnvironment().getId());
        deploymentTopology.setVersionId(executionContext.getEnvironmentContext().get().getEnvironment().getTopologyVersion());
        DeploymentTopologyDTO deploymentTopologyDTO = new DeploymentTopologyDTO();
        topologyDTOBuilder.initTopologyDTO(deploymentTopology, deploymentTopologyDTO);
        // Convert log result to validation result.
        TopologyValidationResult validationResult = new TopologyValidationResult();
        for (AbstractTask task : executionContext.getLog().getInfos()) {
            validationResult.addInfo(task);
        }
        for (AbstractTask task : executionContext.getLog().getWarnings()) {
            validationResult.addWarning(task);
        }
        // Errors go into the generic task list; the validity flag below is derived from that list being empty.
        for (AbstractTask task : executionContext.getLog().getErrors()) {
            validationResult.addTask(task);
        }
        validationResult.setValid(validationResult.getTaskList() == null || validationResult.getTaskList().isEmpty());
        deploymentTopologyDTO.setValidation(validationResult);
        // Preconfigured inputs default to an empty map when the flow produced no such configuration.
        Optional<PreconfiguredInputsConfiguration> preconfiguredInputsConfiguration = executionContext.getConfiguration(PreconfiguredInputsConfiguration.class,
                DeploymentTopologyDTOBuilder.class.getSimpleName());
        if (!preconfiguredInputsConfiguration.isPresent()) {
            deploymentTopology.setPreconfiguredInputProperties(Maps.newHashMap());
        } else {
            deploymentTopology.setPreconfiguredInputProperties(preconfiguredInputsConfiguration.get().getInputs());
        }
        // Deployer inputs and uploaded artifacts likewise default to empty maps.
        Optional<DeploymentInputs> inputsOptional = executionContext.getConfiguration(DeploymentInputs.class,
                DeploymentTopologyDTOBuilder.class.getSimpleName());
        if (!inputsOptional.isPresent()) {
            deploymentTopology.setDeployerInputProperties(Maps.newHashMap());
            deploymentTopology.setUploadedInputArtifacts(Maps.newHashMap());
        } else {
            deploymentTopology.setDeployerInputProperties(inputsOptional.get().getInputs());
            deploymentTopology.setUploadedInputArtifacts(inputsOptional.get().getInputArtifacts());
        }
        // Without a matching configuration there is no location/substitution data to add: return early.
        Optional<DeploymentMatchingConfiguration> matchingConfigurationOptional = executionContext.getConfiguration(DeploymentMatchingConfiguration.class,
                DeploymentTopologyDTOBuilder.class.getSimpleName());
        if (!matchingConfigurationOptional.isPresent()) {
            return deploymentTopologyDTO;
        }
        DeploymentMatchingConfiguration matchingConfiguration = matchingConfigurationOptional.get();
        deploymentTopology.setOrchestratorId(matchingConfiguration.getOrchestratorId());
        deploymentTopology.setLocationGroups(matchingConfiguration.getLocationGroups());
        deploymentTopologyDTO.setLocationPolicies(matchingConfiguration.getLocationIds());
        // Good enough approximation as it doesn't contains just the location dependencies.
        deploymentTopology.setLocationDependencies(executionContext.getTopology().getDependencies());
        DeploymentSubstitutionConfiguration substitutionConfiguration = new DeploymentSubstitutionConfiguration();
        substitutionConfiguration.setSubstitutionTypes(new LocationResourceTypes());
        // fill DTO with policies substitution stuffs
        fillDTOWithPoliciesSubstitutionConfiguration(executionContext, deploymentTopology, deploymentTopologyDTO, matchingConfiguration,
                substitutionConfiguration);
        // fill DTO with nodes substitution stuffs
        fillDTOWithNodesSubstitutionConfiguration(executionContext, deploymentTopology, deploymentTopologyDTO, matchingConfiguration,
                substitutionConfiguration);
        deploymentTopologyDTO.setAvailableSubstitutions(substitutionConfiguration);
        ApplicationEnvironment environment = executionContext.getEnvironmentContext().get().getEnvironment();
        // Fall back to a fresh properties object when the flow has no orchestrator deployment properties.
        OrchestratorDeploymentProperties orchestratorDeploymentProperties = executionContext
                .getConfiguration(OrchestratorDeploymentProperties.class, this.getClass().getSimpleName())
                .orElse(new OrchestratorDeploymentProperties(environment.getTopologyVersion(), environment.getId(), matchingConfiguration.getOrchestratorId()));
        deploymentTopology.setProviderDeploymentProperties(orchestratorDeploymentProperties.getProviderDeploymentProperties());
        deploymentTopologyDTO
                .setSecretCredentialInfos((List<SecretCredentialInfo>) executionContext.getExecutionCache().get(FlowExecutionContext.SECRET_CREDENTIAL));
        deploymentTopologyDTO.setUnprocessedTopology((Topology) executionContext.getExecutionCache().get(FlowExecutionContext.INITIAL_TOPOLOGY));
        return deploymentTopologyDTO;
    }

    /**
     * Copies node matching results from the execution cache and matching configuration onto the
     * deployment topology and DTO, and registers the available node substitutions and their types.
     */
    private void fillDTOWithNodesSubstitutionConfiguration(FlowExecutionContext executionContext, DeploymentTopology deploymentTopology,
            DeploymentTopologyDTO deploymentTopologyDTO, DeploymentMatchingConfiguration matchingConfiguration,
            DeploymentSubstitutionConfiguration substitutionConfiguration) {
        // used by ui to know if a property is editable. This should however be done differently with a better v2 api.
        deploymentTopology.setOriginalNodes((Map<String, NodeTemplate>) executionContext.getExecutionCache().get(FlowExecutionContext.MATCHING_ORIGINAL_NODES));
        deploymentTopology.setSubstitutedNodes(matchingConfiguration.getMatchedLocationResources());
        deploymentTopology
                .setMatchReplacedNodes((Map<String, NodeTemplate>) executionContext.getExecutionCache().get(FlowExecutionContext.MATCHING_REPLACED_NODES));
        // Restrict the map of LocationResourceTemplate to the ones that are actually substituted after matching.
        Map<String, LocationResourceTemplate> allLocationResourcesTemplates = (Map<String, LocationResourceTemplate>) executionContext.getExecutionCache()
                .get(FlowExecutionContext.MATCHED_NODE_LOCATION_TEMPLATES_BY_ID_MAP);
        Map<String, LocationResourceTemplate> substitutedLocationResourceTemplate = Maps.newHashMap(); //
        matchingConfiguration.getMatchedLocationResources().values().forEach((locationResourceId) -> substitutedLocationResourceTemplate.put(locationResourceId,
                safe(allLocationResourcesTemplates).get(locationResourceId)));
        deploymentTopologyDTO.setLocationResourceTemplates(substitutedLocationResourceTemplate);
        substitutionConfiguration.setAvailableSubstitutions(
                (Map<String, Set<String>>) executionContext.getExecutionCache().get(FlowExecutionContext.SELECTED_MATCH_NODE_LOCATION_TEMPLATE_BY_NODE_ID_MAP));
        substitutionConfiguration.setSubstitutionsTemplates(allLocationResourcesTemplates);
        // Fetch all required types associated with the location substitution templates.
        substitutionConfiguration.getSubstitutionTypes()
                .addFrom(locationResourceService.getLocationResourceTypes(safe(substitutionConfiguration.getSubstitutionsTemplates()).values()));
        enrichSubstitutionTypesWithServicesDependencies(safe(substitutionConfiguration.getSubstitutionsTemplates()).values(),
                substitutionConfiguration.getSubstitutionTypes());
    }

    /**
     * Copies policy matching results from the execution cache and matching configuration onto the
     * deployment topology and DTO, and registers the available policy substitutions and their types.
     */
    private void fillDTOWithPoliciesSubstitutionConfiguration(FlowExecutionContext executionContext, DeploymentTopology deploymentTopology,
            DeploymentTopologyDTO deploymentTopologyDTO, DeploymentMatchingConfiguration matchingConfiguration,
            DeploymentSubstitutionConfiguration substitutionConfiguration) {
        // used by ui to know if a property is editable. This should however be done differently with a better v2 api.
        deploymentTopology
                .setOriginalPolicies((Map<String, PolicyTemplate>) executionContext.getExecutionCache().get(FlowExecutionContext.MATCHING_ORIGINAL_POLICIES));
        deploymentTopology.setSubstitutedPolicies(matchingConfiguration.getMatchedPolicies());
        // Restrict the map of PolicyLocationResourceTemplate to the ones that are actually substituted after matching.
        Map<String, PolicyLocationResourceTemplate> allResourcesTemplates = (Map<String, PolicyLocationResourceTemplate>) executionContext.getExecutionCache()
                .get(FlowExecutionContext.MATCHED_POLICY_LOCATION_TEMPLATES_BY_ID_MAP);
        Map<String, PolicyLocationResourceTemplate> substitutedResourceTemplates = Maps.newHashMap(); //
        safe(matchingConfiguration.getMatchedPolicies()).values()
                .forEach((locationResourceId) -> substitutedResourceTemplates.put(locationResourceId, safe(allResourcesTemplates).get(locationResourceId)));
        deploymentTopologyDTO.setPolicyLocationResourceTemplates(substitutedResourceTemplates);
        substitutionConfiguration.setAvailablePoliciesSubstitutions((Map<String, Set<String>>) executionContext.getExecutionCache()
                .get(FlowExecutionContext.SELECTED_MATCH_POLICY_LOCATION_TEMPLATE_BY_NODE_ID_MAP));
        substitutionConfiguration.setSubstitutionsPoliciesTemplates(allResourcesTemplates);
        // Fetch all required types associated with the location substitution templates.
        substitutionConfiguration.getSubstitutionTypes().addFrom(
                locationResourceService.getPoliciesLocationResourceTypes(safe(substitutionConfiguration.getSubstitutionsPoliciesTemplates()).values()));
    }

    /**
     * Enrich {@link LocationResourceTypes} adding types coming from on demand service resources.
     */
    private void enrichSubstitutionTypesWithServicesDependencies(Collection<LocationResourceTemplate> resourceTemplates,
            LocationResourceTypes locationResourceTypes) {
        Set<String> serviceTypes = Sets.newHashSet();
        Set<CSARDependency> dependencies = Sets.newHashSet();
        for (LocationResourceTemplate resourceTemplate : resourceTemplates) {
            if (resourceTemplate.isService()) {
                String serviceId = resourceTemplate.getId();
                ServiceResource serviceResource = serviceResourceService.getOrFail(serviceId);
                // Resolve the service's node type from the TOSCA context, falling back to the search
                // service when the context holds a different archive version than the service instance.
                NodeType serviceType = ToscaContext.get(NodeType.class, serviceResource.getNodeInstance().getNodeTemplate().getType());
                if (serviceType == null || !serviceType.getArchiveVersion().equals(serviceResource.getNodeInstance().getTypeVersion())) {
                    serviceType = toscaTypeSearchService.findOrFail(NodeType.class, serviceResource.getNodeInstance().getNodeTemplate().getType(),
                            serviceResource.getNodeInstance().getTypeVersion());
                }
                // Include both the transitive dependencies and the service type's own archive.
                dependencies.addAll(csarDependencyLoader.getDependencies(serviceType.getArchiveName(), serviceType.getArchiveVersion()));
                dependencies.add(new CSARDependency(serviceType.getArchiveName(), serviceType.getArchiveVersion()));
                serviceTypes.add(serviceResource.getNodeInstance().getNodeTemplate().getType());
            }
        }
        locationResourceService.fillLocationResourceTypes(serviceTypes, locationResourceTypes, dependencies);
    }
}
| |
package com.google.devrel.training.conference.spi;
import static com.google.devrel.training.conference.service.OfyService.ofy;
import static org.junit.Assert.*;
import com.google.api.server.spi.response.UnauthorizedException;
import com.google.appengine.api.users.User;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
import com.google.devrel.training.conference.domain.Conference;
// import com.google.devrel.training.conference.domain.Conference;
import com.google.devrel.training.conference.domain.Profile;
import com.google.devrel.training.conference.form.ConferenceForm;
// import com.google.devrel.training.conference.form.ConferenceForm;
import com.google.devrel.training.conference.form.ProfileForm;
import com.google.devrel.training.conference.form.ProfileForm.TeeShirtSize;
import com.googlecode.objectify.Key;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
 * Tests for ConferenceApi API methods.
 *
 * <p>Runs against the local App Engine datastore test harness; each test gets a fresh
 * datastore via {@code helper.setUp()}/{@code helper.tearDown()}.</p>
 */
public class ConferenceApiTest {
    private static final String EMAIL = "example@gmail.com";
    private static final String USER_ID = "123456789";
    private static final TeeShirtSize TEE_SHIRT_SIZE = TeeShirtSize.NOT_SPECIFIED;
    private static final String DISPLAY_NAME = "Your Name Here";
    private static final String NAME = "GCP Live";
    private static final String DESCRIPTION = "New announcements for Google Cloud Platform";
    private static final String CITY = "San Francisco";
    private static final int MONTH = 3;
    private static final int CAP = 500;
    /** Start/end dates (MM/dd/yyyy) shared by every conference fixture. */
    private static final String START_DATE = "03/25/2014";
    private static final String END_DATE = "03/26/2014";
    private User user;
    private ConferenceApi conferenceApi;
    private final LocalServiceTestHelper helper =
            new LocalServiceTestHelper(new LocalDatastoreServiceTestConfig()
                    .setDefaultHighRepJobPolicyUnappliedJobPercentage(100));

    /** Parses a MM/dd/yyyy date string; parsing the same string twice yields equal Dates. */
    private static Date parseDate(String date) throws Exception {
        return new SimpleDateFormat("MM/dd/yyyy").parse(date);
    }

    /** Topics used by the shared conference fixture. */
    private static List<String> sampleTopics() {
        List<String> topics = new ArrayList<>();
        topics.add("Google");
        topics.add("Cloud");
        topics.add("Platform");
        return topics;
    }

    /** Builds the conference form previously duplicated in each conference test. */
    private static ConferenceForm sampleConferenceForm() throws Exception {
        return new ConferenceForm(
                NAME, DESCRIPTION, sampleTopics(), CITY, parseDate(START_DATE), parseDate(END_DATE), CAP);
    }

    @Before
    public void setUp() throws Exception {
        helper.setUp();
        user = new User(EMAIL, "gmail.com", USER_ID);
        conferenceApi = new ConferenceApi();
    }

    @After
    public void tearDown() throws Exception {
        // Clear the Objectify session cache before tearing down the datastore stub.
        ofy().clear();
        helper.tearDown();
    }

    /** getProfile must reject unauthenticated (null) users. */
    @Test(expected = UnauthorizedException.class)
    public void testGetProfileWithoutUser() throws Exception {
        conferenceApi.getProfile(null);
    }

    /** Before any save, getProfile returns null both via Objectify and via the API. */
    @Test
    public void testGetProfileFirstTime() throws Exception {
        Profile profile = ofy().load().key(Key.create(Profile.class, user.getUserId())).now();
        assertNull(profile);
        profile = conferenceApi.getProfile(user);
        assertNull(profile);
    }

    /** Saving a profile persists all fields and returns them. */
    @Test
    public void testSaveProfile() throws Exception {
        // Save the profile for the first time.
        Profile profile = conferenceApi.saveProfile(
                user, new ProfileForm(DISPLAY_NAME, TEE_SHIRT_SIZE));
        // Check the return value first.
        assertEquals(USER_ID, profile.getUserId());
        assertEquals(EMAIL, profile.getMainEmail());
        assertEquals(TEE_SHIRT_SIZE, profile.getTeeShirtSize());
        assertEquals(DISPLAY_NAME, profile.getDisplayName());
        // Fetch the Profile via Objectify.
        profile = ofy().load().key(Key.create(Profile.class, user.getUserId())).now();
        assertEquals(USER_ID, profile.getUserId());
        assertEquals(EMAIL, profile.getMainEmail());
        assertEquals(TEE_SHIRT_SIZE, profile.getTeeShirtSize());
        assertEquals(DISPLAY_NAME, profile.getDisplayName());
    }

    /** Saving with null form values falls back to defaults (display name derived from email). */
    @Test
    public void testSaveProfileWithNull() throws Exception {
        // Save the profile for the first time with null values.
        Profile profile = conferenceApi.saveProfile(user, new ProfileForm(null, null));
        String displayName = EMAIL.substring(0, EMAIL.indexOf("@"));
        // Check the return value first.
        assertEquals(USER_ID, profile.getUserId());
        assertEquals(EMAIL, profile.getMainEmail());
        assertEquals(TEE_SHIRT_SIZE, profile.getTeeShirtSize());
        assertEquals(displayName, profile.getDisplayName());
        // Fetch the Profile via Objectify.
        profile = ofy().load().key(Key.create(Profile.class, user.getUserId())).now();
        assertEquals(USER_ID, profile.getUserId());
        assertEquals(EMAIL, profile.getMainEmail());
        assertEquals(TEE_SHIRT_SIZE, profile.getTeeShirtSize());
        assertEquals(displayName, profile.getDisplayName());
    }

    /** getProfile returns what saveProfile stored. */
    @Test
    public void testGetProfile() throws Exception {
        conferenceApi.saveProfile(user, new ProfileForm(DISPLAY_NAME, TEE_SHIRT_SIZE));
        // Fetch the Profile via the API.
        Profile profile = conferenceApi.getProfile(user);
        assertEquals(USER_ID, profile.getUserId());
        assertEquals(EMAIL, profile.getMainEmail());
        assertEquals(TEE_SHIRT_SIZE, profile.getTeeShirtSize());
        assertEquals(DISPLAY_NAME, profile.getDisplayName());
    }

    /** A second saveProfile with new values overwrites the stored profile. */
    @Test
    public void testUpdateProfile() throws Exception {
        // Save for the first time.
        conferenceApi.saveProfile(user, new ProfileForm(DISPLAY_NAME, TEE_SHIRT_SIZE));
        Profile profile = ofy().load().key(Key.create(Profile.class, user.getUserId())).now();
        assertEquals(USER_ID, profile.getUserId());
        assertEquals(EMAIL, profile.getMainEmail());
        assertEquals(TEE_SHIRT_SIZE, profile.getTeeShirtSize());
        assertEquals(DISPLAY_NAME, profile.getDisplayName());
        // Then try to update it.
        String newDisplayName = "New Name";
        TeeShirtSize newTeeShirtSize = TeeShirtSize.L;
        conferenceApi.saveProfile(user, new ProfileForm(newDisplayName, newTeeShirtSize));
        profile = ofy().load().key(Key.create(Profile.class, user.getUserId())).now();
        assertEquals(USER_ID, profile.getUserId());
        assertEquals(EMAIL, profile.getMainEmail());
        assertEquals(newTeeShirtSize, profile.getTeeShirtSize());
        assertEquals(newDisplayName, profile.getDisplayName());
    }

    /** Updating with nulls must not overwrite previously stored values. */
    @Test
    public void testUpdateProfileWithNulls() throws Exception {
        conferenceApi.saveProfile(user, new ProfileForm(DISPLAY_NAME, TEE_SHIRT_SIZE));
        // Update the Profile with null values.
        Profile profile = conferenceApi.saveProfile(user, new ProfileForm(null, null));
        // Expected behavior is that the existing properties do not get overwritten
        // Check the return value first.
        assertEquals(USER_ID, profile.getUserId());
        assertEquals(EMAIL, profile.getMainEmail());
        assertEquals(TEE_SHIRT_SIZE, profile.getTeeShirtSize());
        assertEquals(DISPLAY_NAME, profile.getDisplayName());
        // Fetch the Profile via Objectify.
        profile = ofy().load().key(Key.create(Profile.class, user.getUserId())).now();
        assertEquals(USER_ID, profile.getUserId());
        assertEquals(EMAIL, profile.getMainEmail());
        assertEquals(TEE_SHIRT_SIZE, profile.getTeeShirtSize());
        assertEquals(DISPLAY_NAME, profile.getDisplayName());
    }

    /** Creating a conference stores all fields and implicitly creates a default Profile. */
    @Test
    public void testCreateConference() throws Exception {
        Conference conference = conferenceApi.createConference(user, sampleConferenceForm());
        // Check the return value.
        assertEquals(NAME, conference.getName());
        assertEquals(DESCRIPTION, conference.getDescription());
        assertEquals(sampleTopics(), conference.getTopics());
        assertEquals(USER_ID, conference.getOrganizerUserId());
        assertEquals(CITY, conference.getCity());
        assertEquals(parseDate(START_DATE), conference.getStartDate());
        assertEquals(parseDate(END_DATE), conference.getEndDate());
        assertEquals(CAP, conference.getMaxAttendees());
        assertEquals(CAP, conference.getSeatsAvailable());
        assertEquals(MONTH, conference.getMonth());
        // Check if a new Profile is created
        Profile profile = ofy().load().key(Key.create(Profile.class, user.getUserId())).now();
        assertEquals(USER_ID, profile.getUserId());
        assertEquals(EMAIL, profile.getMainEmail());
        assertEquals(TEE_SHIRT_SIZE, profile.getTeeShirtSize());
        String displayName = EMAIL.substring(0, EMAIL.indexOf("@"));
        assertEquals(displayName, profile.getDisplayName());
    }

    /** getConferencesCreated returns the conferences organized by the user. */
    @Test
    public void testGetConferencesCreated() throws Exception {
        Conference conference = conferenceApi.createConference(user, sampleConferenceForm());
        List<Conference> conferencesCreated = conferenceApi.getConferencesCreated(user);
        assertEquals(1, conferencesCreated.size());
        assertTrue("The result should contain a conference",
                conferencesCreated.contains(conference));
    }

    /** getConference resolves a conference by its websafe key. */
    @Test
    public void testGetConference() throws Exception {
        Conference conference = conferenceApi.createConference(user, sampleConferenceForm());
        conference = conferenceApi.getConference(conference.getWebsafeKey());
        // Check the return value.
        assertEquals(NAME, conference.getName());
        assertEquals(DESCRIPTION, conference.getDescription());
        assertEquals(sampleTopics(), conference.getTopics());
        assertEquals(USER_ID, conference.getOrganizerUserId());
        assertEquals(CITY, conference.getCity());
        assertEquals(parseDate(START_DATE), conference.getStartDate());
        assertEquals(parseDate(END_DATE), conference.getEndDate());
        assertEquals(CAP, conference.getMaxAttendees());
        assertEquals(CAP, conference.getSeatsAvailable());
        assertEquals(MONTH, conference.getMonth());
    }

    /** Register/unregister round-trip: seat count and the user's attendance list stay consistent. */
    @Test
    public void testRegistrations() throws Exception {
        Conference conference = conferenceApi.createConference(user, sampleConferenceForm());
        Long conferenceId = conference.getId();
        // Registration
        Boolean result = conferenceApi.registerForConference(
                user, conference.getWebsafeKey()).getResult();
        conference = conferenceApi.getConference(conference.getWebsafeKey());
        Profile profile = ofy().load().key(Key.create(Profile.class, user.getUserId())).now();
        assertTrue("registerForConference should succeed.", result);
        assertEquals(CAP - 1, conference.getSeatsAvailable());
        assertTrue("Profile should have the conferenceId in conferenceIdsToAttend.",
                profile.getConferenceKeysToAttend().contains(conference.getWebsafeKey()));
        // Unregister
        result = conferenceApi.unregisterFromConference(
                user, conference.getWebsafeKey()).getResult();
        conference = conferenceApi.getConference(conference.getWebsafeKey());
        profile = ofy().load().key(Key.create(Profile.class, user.getUserId())).now();
        assertTrue("unregisterFromConference should succeed.", result);
        assertEquals(CAP, conference.getSeatsAvailable());
        assertFalse("Profile shouldn't have the conferenceId in conferenceIdsToAttend.",
                profile.getConferenceKeysToAttend().contains(conference.getWebsafeKey()));
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.junit.Assert.*;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.regex.Pattern;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.PathUtils;
import org.apache.log4j.Appender;
import org.apache.log4j.AsyncAppender;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.RollingFileAppender;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
/**
* A JUnit test that audit logs are generated
*/
@RunWith(Parameterized.class)
public class TestAuditLogs {
static final String auditLogFile = PathUtils.getTestDirName(TestAuditLogs.class) + "/TestAuditLogs-audit.log";
final boolean useAsyncLog;
/**
 * Parameters for the test run: every test is executed once with the
 * synchronous audit logger and once with the async appender enabled.
 */
@Parameters
public static Collection<Object[]> data() {
    Collection<Object[]> params = new ArrayList<Object[]>();
    // Use the canonical Boolean constants instead of the deprecated Boolean(boolean) constructor.
    params.add(new Object[]{Boolean.FALSE});
    params.add(new Object[]{Boolean.TRUE});
    return params;
}
/** @param useAsyncLog whether the namenode should use the async audit log appender for this run */
public TestAuditLogs(boolean useAsyncLog) {
    this.useAsyncLog = useAsyncLog;
}
// Pattern for:
// allowed=(true|false) ugi=name ip=/address cmd={cmd} src={path} dst=null perm=null
static final Pattern auditPattern = Pattern.compile(
"allowed=.*?\\s" +
"ugi=.*?\\s" +
"ip=/\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\s" +
"cmd=.*?\\ssrc=.*?\\sdst=null\\s" +
"perm=.*?");
static final Pattern successPattern = Pattern.compile(
".*allowed=true.*");
static final Pattern webOpenPattern = Pattern.compile(
".*cmd=open.*proto=webhdfs.*");
static final String username = "bob";
static final String[] groups = { "group1" };
static final String fileName = "/srcdat";
DFSTestUtil util;
MiniDFSCluster cluster;
FileSystem fs;
String fnames[];
Configuration conf;
UserGroupInformation userGroupInfo;
/**
 * Brings up a 4-datanode MiniDFSCluster with audit logging configured (sync or async per the
 * test parameter), creates 20 test files under {@code /srcdat}, and sanity-checks that the
 * audit logger carries exactly the expected appender type.
 */
@Before
public void setupCluster() throws Exception {
    // must configure prior to instantiating the namesystem because it
    // will reconfigure the logger if async is enabled
    configureAuditLogs();
    conf = new HdfsConfiguration();
    final long precision = 1L;
    // 1 ms access-time precision — presumably so each read updates atime; confirm if relied upon.
    conf.setLong(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, precision);
    conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L);
    // WebHDFS must be on for the testAuditWebHdfs* cases.
    conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_AUDIT_LOG_ASYNC_KEY, useAsyncLog);
    util = new DFSTestUtil.Builder().setName("TestAuditAllowed").
        setNumFiles(20).build();
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
    fs = cluster.getFileSystem();
    util.createFiles(fs, fileName);
    // make sure the appender is what it's supposed to be
    Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
    @SuppressWarnings("unchecked")
    List<Appender> appenders = Collections.list(logger.getAllAppenders());
    assertEquals(1, appenders.size());
    assertEquals(useAsyncLog, appenders.get(0) instanceof AsyncAppender);
    fnames = util.getFileNames(fileName);
    util.waitReplication(fs, fileName, (short)3);
    userGroupInfo = UserGroupInformation.createUserForTesting(username, groups);
}
/**
 * Cleans up the test files and shuts down the filesystem and cluster.
 *
 * Null-guards each step so that a failure during {@link #setupCluster()} does not
 * trigger an NPE here that would mask the original setup error.
 */
@After
public void teardownCluster() throws Exception {
    if (util != null && fs != null) {
        util.cleanup(fs, "/srcdat");
    }
    if (fs != null) {
        fs.close();
    }
    if (cluster != null) {
        cluster.shutdown();
    }
}
/** test that allowed operation puts proper entry in audit log */
@Test
public void testAuditAllowed() throws Exception {
    final Path file = new Path(fnames[0]);
    // Access as the unprivileged test user so the namenode audits this principal.
    FileSystem userfs = DFSTestUtil.getFileSystemAs(userGroupInfo, conf);
    // Enable audit logging only now so the log contains exactly this operation.
    setupAuditLogs();
    InputStream istream = userfs.open(file);
    int val = istream.read();
    istream.close();
    // Expect exactly one entry, with allowed=true.
    verifyAuditLogs(true);
    assertTrue("failed to read from file", val >= 0);
}
/** test that allowed stat puts proper entry in audit log */
@Test
public void testAuditAllowedStat() throws Exception {
    final Path file = new Path(fnames[0]);
    FileSystem userfs = DFSTestUtil.getFileSystemAs(userGroupInfo, conf);
    // Enable audit logging only now so the log contains exactly this stat.
    setupAuditLogs();
    FileStatus st = userfs.getFileStatus(file);
    // Expect exactly one entry, with allowed=true.
    verifyAuditLogs(true);
    assertTrue("failed to stat file", st != null && st.isFile());
}
/** test that denied operation puts proper entry in audit log */
@Test
public void testAuditDenied() throws Exception {
    final Path file = new Path(fnames[0]);
    FileSystem userfs = DFSTestUtil.getFileSystemAs(userGroupInfo, conf);
    // As the superuser, lock the file down (0600, owned by root) so the test user is denied.
    fs.setPermission(file, new FsPermission((short)0600));
    fs.setOwner(file, "root", null);
    setupAuditLogs();
    try {
        userfs.open(file);
        fail("open must not succeed");
    } catch(AccessControlException e) {
        System.out.println("got access denied, as expected.");
    }
    // Expect exactly one entry, with allowed=false.
    verifyAuditLogs(false);
}
/** test that access via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfs() throws Exception {
    final Path file = new Path(fnames[0]);
    // Make the file world-readable so the webhdfs read as the test user succeeds.
    fs.setPermission(file, new FsPermission((short)0644));
    fs.setOwner(file, "root", null);
    setupAuditLogs();
    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
    InputStream istream = webfs.open(file);
    int val = istream.read();
    istream.close();
    // NOTE(review): a single webhdfs open+read is expected to yield 3 audit entries
    // (presumably metadata lookups plus the open) — confirm against the audit format.
    verifyAuditLogsRepeat(true, 3);
    assertTrue("failed to read from file", val >= 0);
}
/** test that stat via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfsStat() throws Exception {
    final Path file = new Path(fnames[0]);
    // Make the file world-readable so the webhdfs stat as the test user succeeds.
    fs.setPermission(file, new FsPermission((short)0644));
    fs.setOwner(file, "root", null);
    setupAuditLogs();
    WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
    FileStatus st = webfs.getFileStatus(file);
    // A stat maps to a single audited command, allowed=true.
    verifyAuditLogs(true);
    assertTrue("failed to stat file", st != null && st.isFile());
}
/** test that denied access via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfsDenied() throws Exception {
    final Path file = new Path(fnames[0]);
    // Lock the file down (0600, owned by root) so the webhdfs read as the test user is denied.
    fs.setPermission(file, new FsPermission((short)0600));
    fs.setOwner(file, "root", null);
    setupAuditLogs();
    try {
        WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
        InputStream istream = webfs.open(file);
        int val = istream.read();
        fail("open+read must not succeed, got " + val);
    } catch(AccessControlException E) {
        System.out.println("got access denied, as expected.");
    }
    // NOTE(review): the denied webhdfs attempt is expected to leave 2 audit entries — confirm.
    verifyAuditLogsRepeat(false, 2);
}
/** test that open via webhdfs puts proper entry in audit log */
@Test
public void testAuditWebHdfsOpen() throws Exception {
  final Path file = new Path(fnames[0]);
  fs.setPermission(file, new FsPermission((short) 0644));
  fs.setOwner(file, "root", null);
  setupAuditLogs();
  WebHdfsFileSystem webfs =
      WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME);
  // Close the returned stream instead of leaking it; the audit entry being
  // checked below is produced by the open itself, not by any read.
  webfs.open(file).close();
  verifyAuditLogsCheckPattern(true, 3, webOpenPattern);
}
/** Raises the audit logger's level to INFO so subsequent operations are recorded. */
private void setupAuditLogs() throws IOException {
  final Logger auditLogger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
  // the logger was silenced during cluster startup; enable it for the test body
  auditLogger.setLevel(Level.INFO);
}
/**
 * Rebuilds the audit log appender from scratch: shuts down log4j to release
 * open file handles, removes any stale log file, and attaches a fresh
 * RollingFileAppender while keeping the logger silenced.
 */
private void configureAuditLogs() throws IOException {
  // Shutdown the LogManager to release all logger open file handles.
  // Apache commons logging provides no way to release underlying loggers,
  // so a full shutdown is the only option (see commons logging FAQ).
  LogManager.shutdown();

  final File staleLog = new File(auditLogFile);
  if (staleLog.exists()) {
    assertTrue(staleLog.delete());
  }

  final Logger auditLogger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
  // keep logging disabled while the cluster startup preps files
  auditLogger.setLevel(Level.OFF);

  final PatternLayout layout = new PatternLayout("%m%n");
  auditLogger.addAppender(new RollingFileAppender(layout, auditLogFile));
}
/**
 * Asserts that the audit log contains exactly one entry and that its
 * success flag matches {@code expectSuccess}. Delegates to
 * {@code verifyAuditLogsRepeat} with a count of 1.
 */
// Ensure audit log has only one entry
private void verifyAuditLogs(boolean expectSuccess) throws IOException {
verifyAuditLogsRepeat(expectSuccess, 1);
}
/**
 * Asserts that the audit log contains exactly {@code ndupe} entries, each
 * matching {@code auditPattern}, and that every entry's success flag matches
 * {@code expectSuccess}. Silences the logger and flushes/closes all appenders
 * first so the file contents are complete before reading.
 */
// Ensure audit log has exactly N entries
private void verifyAuditLogsRepeat(boolean expectSuccess, int ndupe)
    throws IOException {
  // Turn off the logs
  Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
  logger.setLevel(Level.OFF);
  // Close the appenders and force all logs to be flushed
  Enumeration<?> appenders = logger.getAllAppenders();
  while (appenders.hasMoreElements()) {
    Appender appender = (Appender) appenders.nextElement();
    appender.close();
  }
  // try-with-resources replaces the manual try/finally; it also closes the
  // underlying FileReader if the BufferedReader constructor were to throw.
  boolean allSucceeded = true;
  try (BufferedReader reader = new BufferedReader(new FileReader(auditLogFile))) {
    for (int i = 0; i < ndupe; i++) {
      String line = reader.readLine();
      assertNotNull(line);
      assertTrue("Expected audit event not found in audit log",
          auditPattern.matcher(line).matches());
      allSucceeded &= successPattern.matcher(line).matches();
    }
    assertNull("Unexpected event in audit log", reader.readLine());
    assertTrue("Expected success=" + expectSuccess, allSucceeded == expectSuccess);
  }
}
/**
 * Asserts that the audit log contains exactly {@code ndupe} entries, that at
 * least one of them matches {@code pattern}, and that every entry's success
 * flag matches {@code expectSuccess}. Silences the logger and flushes/closes
 * all appenders first so the file contents are complete before reading.
 */
// Ensure audit log has exactly N entries
private void verifyAuditLogsCheckPattern(boolean expectSuccess, int ndupe, Pattern pattern)
    throws IOException {
  // Turn off the logs
  Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
  logger.setLevel(Level.OFF);
  // Close the appenders and force all logs to be flushed
  Enumeration<?> appenders = logger.getAllAppenders();
  while (appenders.hasMoreElements()) {
    Appender appender = (Appender) appenders.nextElement();
    appender.close();
  }
  // try-with-resources replaces the manual try/finally; it also closes the
  // underlying FileReader if the BufferedReader constructor were to throw.
  boolean allSucceeded = true;
  boolean patternMatches = false;
  try (BufferedReader reader = new BufferedReader(new FileReader(auditLogFile))) {
    for (int i = 0; i < ndupe; i++) {
      String line = reader.readLine();
      assertNotNull(line);
      patternMatches |= pattern.matcher(line).matches();
      allSucceeded &= successPattern.matcher(line).matches();
    }
    assertNull("Unexpected event in audit log", reader.readLine());
    assertTrue("Expected audit event not found in audit log", patternMatches);
    assertTrue("Expected success=" + expectSuccess, allSucceeded == expectSuccess);
  }
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kinesisanalytics.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Describes updates to apply to an existing Amazon Kinesis Analytics application.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kinesisanalytics-2015-08-14/ApplicationUpdate" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ApplicationUpdate implements Serializable, Cloneable, StructuredPojo {

    /** Input configuration updates to apply to the application. */
    private java.util.List<InputUpdate> inputUpdates;
    /** Application code update to apply. */
    private String applicationCodeUpdate;
    /** Output configuration updates to apply to the application. */
    private java.util.List<OutputUpdate> outputUpdates;
    /** Reference data source updates to apply to the application. */
    private java.util.List<ReferenceDataSourceUpdate> referenceDataSourceUpdates;
    /** CloudWatch logging option updates to apply to the application. */
    private java.util.List<CloudWatchLoggingOptionUpdate> cloudWatchLoggingOptionUpdates;

    /**
     * @return the application input configuration updates, or {@code null} if none were set
     */
    public java.util.List<InputUpdate> getInputUpdates() {
        return inputUpdates;
    }

    /**
     * Sets the application input configuration updates. A defensive copy of the
     * collection is stored; passing {@code null} clears the field.
     *
     * @param inputUpdates the input configuration updates
     */
    public void setInputUpdates(java.util.Collection<InputUpdate> inputUpdates) {
        this.inputUpdates = (inputUpdates == null) ? null : new java.util.ArrayList<InputUpdate>(inputUpdates);
    }

    /**
     * Appends input configuration updates to any already present. Use
     * {@link #setInputUpdates(java.util.Collection)} or {@link #withInputUpdates(java.util.Collection)}
     * to replace the existing values instead.
     *
     * @param inputUpdates the input configuration updates to append
     * @return this object, for method chaining
     */
    public ApplicationUpdate withInputUpdates(InputUpdate... inputUpdates) {
        if (this.inputUpdates == null) {
            this.inputUpdates = new java.util.ArrayList<InputUpdate>(inputUpdates.length);
        }
        java.util.Collections.addAll(this.inputUpdates, inputUpdates);
        return this;
    }

    /**
     * Replaces the application input configuration updates.
     *
     * @param inputUpdates the input configuration updates
     * @return this object, for method chaining
     */
    public ApplicationUpdate withInputUpdates(java.util.Collection<InputUpdate> inputUpdates) {
        setInputUpdates(inputUpdates);
        return this;
    }

    /**
     * Sets the application code update.
     *
     * @param applicationCodeUpdate the application code update
     */
    public void setApplicationCodeUpdate(String applicationCodeUpdate) {
        this.applicationCodeUpdate = applicationCodeUpdate;
    }

    /**
     * @return the application code update, or {@code null} if none was set
     */
    public String getApplicationCodeUpdate() {
        return this.applicationCodeUpdate;
    }

    /**
     * Sets the application code update.
     *
     * @param applicationCodeUpdate the application code update
     * @return this object, for method chaining
     */
    public ApplicationUpdate withApplicationCodeUpdate(String applicationCodeUpdate) {
        setApplicationCodeUpdate(applicationCodeUpdate);
        return this;
    }

    /**
     * @return the application output configuration updates, or {@code null} if none were set
     */
    public java.util.List<OutputUpdate> getOutputUpdates() {
        return outputUpdates;
    }

    /**
     * Sets the application output configuration updates. A defensive copy of the
     * collection is stored; passing {@code null} clears the field.
     *
     * @param outputUpdates the output configuration updates
     */
    public void setOutputUpdates(java.util.Collection<OutputUpdate> outputUpdates) {
        this.outputUpdates = (outputUpdates == null) ? null : new java.util.ArrayList<OutputUpdate>(outputUpdates);
    }

    /**
     * Appends output configuration updates to any already present. Use
     * {@link #setOutputUpdates(java.util.Collection)} or {@link #withOutputUpdates(java.util.Collection)}
     * to replace the existing values instead.
     *
     * @param outputUpdates the output configuration updates to append
     * @return this object, for method chaining
     */
    public ApplicationUpdate withOutputUpdates(OutputUpdate... outputUpdates) {
        if (this.outputUpdates == null) {
            this.outputUpdates = new java.util.ArrayList<OutputUpdate>(outputUpdates.length);
        }
        java.util.Collections.addAll(this.outputUpdates, outputUpdates);
        return this;
    }

    /**
     * Replaces the application output configuration updates.
     *
     * @param outputUpdates the output configuration updates
     * @return this object, for method chaining
     */
    public ApplicationUpdate withOutputUpdates(java.util.Collection<OutputUpdate> outputUpdates) {
        setOutputUpdates(outputUpdates);
        return this;
    }

    /**
     * @return the application reference data source updates, or {@code null} if none were set
     */
    public java.util.List<ReferenceDataSourceUpdate> getReferenceDataSourceUpdates() {
        return referenceDataSourceUpdates;
    }

    /**
     * Sets the application reference data source updates. A defensive copy of the
     * collection is stored; passing {@code null} clears the field.
     *
     * @param referenceDataSourceUpdates the reference data source updates
     */
    public void setReferenceDataSourceUpdates(java.util.Collection<ReferenceDataSourceUpdate> referenceDataSourceUpdates) {
        this.referenceDataSourceUpdates = (referenceDataSourceUpdates == null) ? null
                : new java.util.ArrayList<ReferenceDataSourceUpdate>(referenceDataSourceUpdates);
    }

    /**
     * Appends reference data source updates to any already present. Use
     * {@link #setReferenceDataSourceUpdates(java.util.Collection)} or
     * {@link #withReferenceDataSourceUpdates(java.util.Collection)} to replace the existing values instead.
     *
     * @param referenceDataSourceUpdates the reference data source updates to append
     * @return this object, for method chaining
     */
    public ApplicationUpdate withReferenceDataSourceUpdates(ReferenceDataSourceUpdate... referenceDataSourceUpdates) {
        if (this.referenceDataSourceUpdates == null) {
            this.referenceDataSourceUpdates = new java.util.ArrayList<ReferenceDataSourceUpdate>(referenceDataSourceUpdates.length);
        }
        java.util.Collections.addAll(this.referenceDataSourceUpdates, referenceDataSourceUpdates);
        return this;
    }

    /**
     * Replaces the application reference data source updates.
     *
     * @param referenceDataSourceUpdates the reference data source updates
     * @return this object, for method chaining
     */
    public ApplicationUpdate withReferenceDataSourceUpdates(java.util.Collection<ReferenceDataSourceUpdate> referenceDataSourceUpdates) {
        setReferenceDataSourceUpdates(referenceDataSourceUpdates);
        return this;
    }

    /**
     * @return the application CloudWatch logging option updates, or {@code null} if none were set
     */
    public java.util.List<CloudWatchLoggingOptionUpdate> getCloudWatchLoggingOptionUpdates() {
        return cloudWatchLoggingOptionUpdates;
    }

    /**
     * Sets the application CloudWatch logging option updates. A defensive copy of
     * the collection is stored; passing {@code null} clears the field.
     *
     * @param cloudWatchLoggingOptionUpdates the CloudWatch logging option updates
     */
    public void setCloudWatchLoggingOptionUpdates(java.util.Collection<CloudWatchLoggingOptionUpdate> cloudWatchLoggingOptionUpdates) {
        this.cloudWatchLoggingOptionUpdates = (cloudWatchLoggingOptionUpdates == null) ? null
                : new java.util.ArrayList<CloudWatchLoggingOptionUpdate>(cloudWatchLoggingOptionUpdates);
    }

    /**
     * Appends CloudWatch logging option updates to any already present. Use
     * {@link #setCloudWatchLoggingOptionUpdates(java.util.Collection)} or
     * {@link #withCloudWatchLoggingOptionUpdates(java.util.Collection)} to replace the existing values instead.
     *
     * @param cloudWatchLoggingOptionUpdates the CloudWatch logging option updates to append
     * @return this object, for method chaining
     */
    public ApplicationUpdate withCloudWatchLoggingOptionUpdates(CloudWatchLoggingOptionUpdate... cloudWatchLoggingOptionUpdates) {
        if (this.cloudWatchLoggingOptionUpdates == null) {
            this.cloudWatchLoggingOptionUpdates = new java.util.ArrayList<CloudWatchLoggingOptionUpdate>(cloudWatchLoggingOptionUpdates.length);
        }
        java.util.Collections.addAll(this.cloudWatchLoggingOptionUpdates, cloudWatchLoggingOptionUpdates);
        return this;
    }

    /**
     * Replaces the application CloudWatch logging option updates.
     *
     * @param cloudWatchLoggingOptionUpdates the CloudWatch logging option updates
     * @return this object, for method chaining
     */
    public ApplicationUpdate withCloudWatchLoggingOptionUpdates(java.util.Collection<CloudWatchLoggingOptionUpdate> cloudWatchLoggingOptionUpdates) {
        setCloudWatchLoggingOptionUpdates(cloudWatchLoggingOptionUpdates);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Field rendering order and separators must stay stable; tooling may parse it.
        StringBuilder sb = new StringBuilder("{");
        if (getInputUpdates() != null)
            sb.append("InputUpdates: ").append(getInputUpdates()).append(",");
        if (getApplicationCodeUpdate() != null)
            sb.append("ApplicationCodeUpdate: ").append(getApplicationCodeUpdate()).append(",");
        if (getOutputUpdates() != null)
            sb.append("OutputUpdates: ").append(getOutputUpdates()).append(",");
        if (getReferenceDataSourceUpdates() != null)
            sb.append("ReferenceDataSourceUpdates: ").append(getReferenceDataSourceUpdates()).append(",");
        if (getCloudWatchLoggingOptionUpdates() != null)
            sb.append("CloudWatchLoggingOptionUpdates: ").append(getCloudWatchLoggingOptionUpdates());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof ApplicationUpdate))
            return false;
        ApplicationUpdate other = (ApplicationUpdate) obj;
        // Objects.equals gives the same null-tolerant, field-by-field comparison
        // as the generated xor/equals chain.
        return java.util.Objects.equals(getInputUpdates(), other.getInputUpdates())
                && java.util.Objects.equals(getApplicationCodeUpdate(), other.getApplicationCodeUpdate())
                && java.util.Objects.equals(getOutputUpdates(), other.getOutputUpdates())
                && java.util.Objects.equals(getReferenceDataSourceUpdates(), other.getReferenceDataSourceUpdates())
                && java.util.Objects.equals(getCloudWatchLoggingOptionUpdates(), other.getCloudWatchLoggingOptionUpdates());
    }

    @Override
    public int hashCode() {
        // Objects.hash computes the identical 31-based rolling hash (null -> 0)
        // as the hand-rolled prime loop it replaces.
        return java.util.Objects.hash(getInputUpdates(), getApplicationCodeUpdate(), getOutputUpdates(),
                getReferenceDataSourceUpdates(), getCloudWatchLoggingOptionUpdates());
    }

    @Override
    public ApplicationUpdate clone() {
        try {
            return (ApplicationUpdate) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.kinesisanalytics.model.transform.ApplicationUpdateMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
package org.appenders.log4j2.elasticsearch;
/*-
* #%L
* log4j2-elasticsearch
* %%
* Copyright (C) 2018 Rafal Foltynski
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.SerializationConfig;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.module.afterburner.AfterburnerModule;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.ConfigurationException;
import org.apache.logging.log4j.core.layout.AbstractLayout;
import org.apache.logging.log4j.message.Message;
import org.appenders.log4j2.elasticsearch.json.jackson.ExtendedLog4j2JsonModule;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
/**
 * A Log4j2 layout that serializes incoming events to JSON with a configurable
 * Jackson {@link ObjectWriter}, producing {@link ItemSource} instances via an
 * {@link ItemSourceFactory}. See {@link Builder} API docs for customization options.
 *
 * @deprecated As of 1.7, this class will be removed. Use {@link GenericItemSourceLayout}
 * (or {@link JacksonJsonLayoutPlugin} for Log4j2 XML config support) instead.
 */
@Deprecated
public class JacksonJsonLayout extends AbstractLayout<ItemSource> implements LegacyItemSourceLayout, LifeCycle {

    // volatile: read/written from lifecycle and query methods
    private volatile State state = State.STOPPED;

    private final ObjectWriter objectWriter;
    private final ItemSourceFactory itemSourceFactory;

    /**
     * @param config Log4j2 configuration
     * @param configuredWriter fully configured Jackson writer used for all serialization
     * @param itemSourceFactory produces {@link ItemSource} instances from serialized events
     */
    protected JacksonJsonLayout(Configuration config, ObjectWriter configuredWriter, ItemSourceFactory itemSourceFactory) {
        super(config, null, null);
        this.objectWriter = configuredWriter;
        this.itemSourceFactory = itemSourceFactory;
    }

    @Override
    public String getContentType() {
        return "application/json";
    }

    /**
     * Unsupported: this layout only produces {@link ItemSource}-wrapped output.
     */
    @Override
    public byte[] toByteArray(LogEvent event) {
        throw new UnsupportedOperationException("Cannot return unwrapped byte array. Use ItemSource based API");
    }

    @Override
    public final ItemSource toSerializable(LogEvent event) {
        return serialize(event);
    }

    @Override
    public final ItemSource serialize(LogEvent event) {
        return itemSourceFactory.create(event, objectWriter);
    }

    @Override
    public final ItemSource serialize(Message message) {
        return itemSourceFactory.create(message, objectWriter);
    }

    @Override
    public final ItemSource serialize(Object source) {
        return itemSourceFactory.create(source, objectWriter);
    }

    /** @return a fresh {@link Builder} */
    public static JacksonJsonLayout.Builder newBuilder() {
        return new JacksonJsonLayout.Builder();
    }

    public static class Builder extends org.apache.logging.log4j.core.layout.AbstractLayout.Builder<JacksonJsonLayout.Builder> implements org.apache.logging.log4j.core.util.Builder<JacksonJsonLayout> {

        /**
         * Default: {@link StringItemSourceFactory}
         */
        static final ItemSourceFactory DEFAULT_SOURCE_FACTORY = StringItemSourceFactory.newBuilder().build();

        /**
         * Default: {@code [ExtendedLog4j2JsonModule]}
         */
        static final JacksonModule[] DEFAULT_JACKSON_MODULES = new JacksonModule[]{
                new ExtendedLog4j2JsonModule()
        };

        private ItemSourceFactory itemSourceFactory = DEFAULT_SOURCE_FACTORY;
        private JacksonMixIn[] mixins = new JacksonMixIn[0];
        private JacksonModule[] jacksonModules = DEFAULT_JACKSON_MODULES;
        private VirtualProperty[] virtualProperties = new VirtualProperty[0];
        private VirtualPropertyFilter[] virtualPropertyFilters = new VirtualPropertyFilter[0];
        private boolean useAfterburner;
        private boolean singleThread;

        @Override
        public JacksonJsonLayout build() {
            if (getConfiguration() == null) {
                throw new ConfigurationException("No Configuration instance provided for " + JacksonJsonLayout.class.getSimpleName());
            }
            return new JacksonJsonLayout(getConfiguration(), createConfiguredWriter(), itemSourceFactory);
        }

        /**
         * Builds the writer used by the layout: modules, then mixins, then
         * virtual properties are applied, in that order.
         */
        protected ObjectWriter createConfiguredWriter() {
            final ObjectMapper mapper = createDefaultObjectMapper();
            return configureModules(mapper, getJacksonModules())
                    .configureMixins(mapper, Arrays.asList(mixins))
                    .configureVirtualProperties(mapper, virtualProperties, virtualPropertyFilters)
                    .createConfiguredWriter(mapper);
        }

        /**
         * Collects default, user-provided and (optionally) afterburner modules,
         * de-duplicated by class name on a first-come, first-served basis.
         */
        private Collection<JacksonModule> getJacksonModules() {
            final LinkedList<JacksonModule> modules = new LinkedList<>(Arrays.asList(DEFAULT_JACKSON_MODULES));
            modules.addAll(Arrays.asList(this.jacksonModules));
            if (useAfterburner) {
                // com.fasterxml.jackson.module:jackson-module-afterburner required here
                modules.add(new JacksonAfterburnerModuleConfigurer());
            }
            return new JacksonModulesList(modules);
        }

        protected ObjectWriter createConfiguredWriter(ObjectMapper objectMapper) {
            return objectMapper.writer(new MinimalPrettyPrinter());
        }

        /** Applies each module to the mapper, in iteration order. */
        protected Builder configureModules(ObjectMapper objectMapper, Collection<JacksonModule> modules) {
            for (JacksonModule module : modules) {
                module.applyTo(objectMapper);
            }
            return this;
        }

        /** Registers each mixin's target/mixin class pair with the mapper. */
        protected Builder configureMixins(ObjectMapper objectMapper, List<JacksonMixIn> mixins) {
            for (JacksonMixIn mixin : mixins) {
                objectMapper.addMixIn(mixin.getTargetClass(), mixin.getMixInClass());
            }
            return this;
        }

        /**
         * Resolves non-dynamic virtual property values eagerly, then installs a
         * handler instantiator so dynamic ones are resolved at write time.
         */
        protected Builder configureVirtualProperties(ObjectMapper objectMapper, VirtualProperty[] virtualProperties, VirtualPropertyFilter[] virtualPropertyFilters) {
            final ValueResolver valueResolver = createValueResolver();

            for (VirtualProperty property : virtualProperties) {
                if (!property.isDynamic()) {
                    property.setValue(valueResolver.resolve(property.getValue()));
                }
            }

            final SerializationConfig customConfig = objectMapper.getSerializationConfig()
                    .with(new JacksonHandlerInstantiator(virtualProperties, valueResolver, virtualPropertyFilters));
            objectMapper.setConfig(customConfig);

            return this;
        }

        /**
         * @return resolver used when {@link VirtualProperty}(-ies) configured
         */
        protected ValueResolver createValueResolver() {
            return new Log4j2Lookup(getConfiguration().getStrSubstitutor());
        }

        protected ObjectMapper createDefaultObjectMapper() {
            return new ExtendedObjectMapper(createJsonFactory())
                    .setSerializationInclusion(JsonInclude.Include.NON_EMPTY)
                    .configure(SerializationFeature.CLOSE_CLOSEABLE, false);
        }

        protected JsonFactory createJsonFactory() {
            return singleThread
                    ? new SingleThreadJsonFactoryProvider().create()
                    : new JsonFactory();
        }

        /**
         * @param itemSourceFactory {@link ItemSource} producer
         * @return this
         */
        public Builder withItemSourceFactory(ItemSourceFactory itemSourceFactory) {
            this.itemSourceFactory = itemSourceFactory;
            return this;
        }

        /**
         * Allows to customize {@link LogEvent} and {@link Message} serialization,
         * including user-provided {@link org.apache.logging.log4j.message.ObjectMessage}
         *
         * @param mixins mixins to be applied
         * @return this
         */
        public Builder withMixins(JacksonMixIn... mixins) {
            this.mixins = mixins;
            return this;
        }

        /**
         * Allows to append properties to serialized {@link LogEvent} and {@link Message}.
         *
         * Non-dynamic properties ({@code VirtualProperty#dynamic == false}) are resolved on {@link #build()} call;
         * dynamic ones are deferred to the underlying {@link VirtualPropertiesWriter}.
         * Similar to Log4j2 {@code KeyValuePair}.
         *
         * @param virtualProperties properties to be appended to JSON output
         * @return this
         */
        public Builder withVirtualProperties(VirtualProperty... virtualProperties) {
            this.virtualProperties = virtualProperties;
            return this;
        }

        /**
         * Allows to define inclusion/exclusion filters for {@link VirtualProperty}-ies.
         *
         * @param virtualPropertyFilters filters to be applied to each configured {@link VirtualProperty}
         * @return this
         */
        public Builder withVirtualPropertyFilters(VirtualPropertyFilter[] virtualPropertyFilters) {
            this.virtualPropertyFilters = virtualPropertyFilters;
            return this;
        }

        /**
         * Allows to configure {@link AfterburnerModule} - (de)serialization optimizer
         *
         * @param useAfterburner if true, {@link AfterburnerModule} will be used, false otherwise
         * @return this
         */
        public Builder withAfterburner(boolean useAfterburner) {
            this.useAfterburner = useAfterburner;
            return this;
        }

        /**
         * Allows to configure {@code org.appenders.st.jackson.SingleThreadJsonFactory}
         *
         * NOTE: Use ONLY when {@link JacksonJsonLayout#serialize(LogEvent)}/{@link JacksonJsonLayout#serialize(Message)}
         * are called exclusively by one thread at a time, e.g. with AsyncLogger
         *
         * @param singleThread if true, {@code org.appenders.st.jackson.SingleThreadJsonFactory} will be used to create serializers,
         *                     otherwise {@code com.fasterxml.jackson.core.JsonFactory} will be used
         * @return this
         */
        public Builder withSingleThread(boolean singleThread) {
            this.singleThread = singleThread;
            return this;
        }

        /**
         * Allow to configure additional {@code com.fasterxml.jackson.databind.Module} implementations
         *
         * @param modules Jackson modules to register on {@link #build()}
         * @return this
         */
        public Builder withJacksonModules(JacksonModule... modules) {
            this.jacksonModules = modules;
            return this;
        }

        /**
         * First-comes, first-served {@link JacksonModule} linked list. Ensures that no more than one
         * element of a given class name is present.
         */
        private static class JacksonModulesList extends LinkedHashSet<JacksonModule> {

            public JacksonModulesList(LinkedList<JacksonModule> linkedList) {
                // super(Collection) routes through the overridden add(), so
                // de-duplication already applies during construction
                super(linkedList);
            }

            /**
             * Adds the given {@link JacksonModule} unless one of the same class name is already present.
             *
             * @param jacksonModule {@link JacksonModule} to add
             * @return <i>true</i> if the element was added, <i>false</i> otherwise
             */
            @Override
            public boolean add(JacksonModule jacksonModule) {
                return !contains(jacksonModule) && super.add(jacksonModule);
            }

            /**
             * Checks for an element with the same class name as the given object.
             *
             * @param o element to check
             * @return <i>true</i> if such an element is present, <i>false</i> otherwise
             */
            @Override
            public boolean contains(Object o) {
                final String className = o.getClass().getName();
                for (JacksonModule existing : this) {
                    if (existing.getClass().getName().equals(className)) {
                        return true;
                    }
                }
                return false;
            }
        }
    }

    // ==========
    // LIFECYCLE
    // ==========

    @Override
    public void start() {
        itemSourceFactory.start();
        state = State.STARTED;
    }

    @Override
    public void stop() {
        if (!itemSourceFactory.isStopped()) {
            itemSourceFactory.stop();
        }
        state = State.STOPPED;
    }

    @Override
    public boolean isStarted() {
        return state == State.STARTED;
    }

    @Override
    public boolean isStopped() {
        return state == State.STOPPED;
    }
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.wiki.parser;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PushbackReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.apache.log4j.Logger;
import org.apache.oro.text.GlobCompiler;
import org.apache.oro.text.regex.MalformedPatternException;
import org.apache.oro.text.regex.Pattern;
import org.apache.oro.text.regex.PatternCompiler;
import org.apache.wiki.StringTransmutator;
import org.apache.wiki.WikiContext;
import org.apache.wiki.WikiEngine;
import org.jdom2.Element;
/**
 * Provides an abstract class for the parser instances. Subclasses implement
 * {@link #parse()} to turn the wiki markup supplied via the Reader into a
 * WikiDocument.
 *
 * @since 2.4
 */
public abstract class MarkupParser
{
    /** Allow this many characters to be pushed back in the stream. In effect,
        this limits the size of a single line. */
    protected static final int PUSHBACK_BUFFER_SIZE = 10*1024;

    // Wiki markup source; supports push-back of up to PUSHBACK_BUFFER_SIZE characters.
    protected PushbackReader m_in;

    private int m_pos = -1; // current position in reader stream

    // Engine and context this parser operates in; set once in the constructor.
    protected WikiEngine m_engine;
    protected WikiContext m_context;

    /** Optionally stores internal wikilinks */
    protected ArrayList<StringTransmutator> m_localLinkMutatorChain = new ArrayList<StringTransmutator>();

    // Hooks invoked for external (http://, ftp://, ...) links, including inlined images.
    protected ArrayList<StringTransmutator> m_externalLinkMutatorChain = new ArrayList<StringTransmutator>();

    // Hooks invoked for attachment links.
    protected ArrayList<StringTransmutator> m_attachmentLinkMutatorChain = new ArrayList<StringTransmutator>();

    // Listeners notified whenever a parsed heading is found.
    protected ArrayList<HeadingListener> m_headingListenerChain = new ArrayList<HeadingListener>();

    // Hooks invoked on the user-visible link *text* (not the link target) as it is written out.
    protected ArrayList<StringTransmutator> m_linkMutators = new ArrayList<StringTransmutator>();

    // If true, links matching the inline image patterns are rendered as images.
    protected boolean m_inlineImages = true;

    // If true, embedded access rules are parsed; see disableAccessRules().
    protected boolean m_parseAccessRules = true;

    /** Keeps image regexp Patterns */
    protected List< Pattern > m_inlineImagePatterns = null;

    private static Logger log = Logger.getLogger( MarkupParser.class );

    /** If set to "true", allows using raw HTML within Wiki text. Be warned,
        this is a VERY dangerous option to set - never turn this on in a publicly
        allowable Wiki, unless you are absolutely certain of what you're doing. */
    public static final String PROP_ALLOWHTML = "jspwiki.translatorReader.allowHTML";

    /** If set to "true", enables plugins during parsing */
    public static final String PROP_RUNPLUGINS = "jspwiki.translatorReader.runPlugins";

    /** Lists all punctuation characters allowed in WikiMarkup. These
        will not be cleaned away. This is for compatibility for older versions
        of JSPWiki. */
    protected static final String LEGACY_CHARS_ALLOWED = "._";

    /** Lists all punctuation characters allowed in page names. */
    public static final String PUNCTUATION_CHARS_ALLOWED = " ()&+,-=._$";

    /** Name of the outlink image; relative path to the JSPWiki directory. */
    public static final String OUTLINK_IMAGE = "images/out.png";

    /** If true, all outward links (external links) have a small link image appended. */
    public static final String PROP_USEOUTLINKIMAGE = "jspwiki.translatorReader.useOutlinkImage";

    // Engine attribute key under which the compiled inline-image Patterns are cached.
    private static final String INLINE_IMAGE_PATTERNS = "JSPWikiMarkupParser.inlineImagePatterns";

    /**
     * Constructs a MarkupParser. The subclass must call this constructor
     * to set up the necessary bits and pieces.
     *
     * @param context The WikiContext.
     * @param in The reader from which we are reading the bytes from.
     */
    protected MarkupParser( WikiContext context, Reader in )
    {
        m_engine = context.getEngine();
        m_context = context;
        setInputReader( in );
    }

    /**
     * Replaces the current input character stream with a new one.
     * @param in New source for input. If null, this method does nothing.
     * @return the old stream
     */
    public Reader setInputReader( Reader in )
    {
        Reader old = m_in;
        if( in != null )
        {
            // Buffer the raw reader, then wrap for push-back support.
            m_in = new PushbackReader( new BufferedReader( in ),
                                       PUSHBACK_BUFFER_SIZE );
        }
        return old;
    }

    /**
     * Adds a hook for processing link texts. This hook is called
     * when the link text is written into the output stream, and
     * you may use it to modify the text. It does not affect the
     * actual link, only the user-visible text.
     *
     * @param mutator The hook to call. Null is safe.
     */
    public void addLinkTransmutator( StringTransmutator mutator )
    {
        if( mutator != null )
        {
            m_linkMutators.add( mutator );
        }
    }

    /**
     * Adds a hook for processing local links. The engine
     * transforms both non-existing and existing page links.
     *
     * @param mutator The hook to call. Null is safe.
     */
    public void addLocalLinkHook( StringTransmutator mutator )
    {
        if( mutator != null )
        {
            m_localLinkMutatorChain.add( mutator );
        }
    }

    /**
     * Adds a hook for processing external links. This includes
     * all http:// ftp://, etc. links, including inlined images.
     *
     * @param mutator The hook to call. Null is safe.
     */
    public void addExternalLinkHook( StringTransmutator mutator )
    {
        if( mutator != null )
        {
            m_externalLinkMutatorChain.add( mutator );
        }
    }

    /**
     * Adds a hook for processing attachment links.
     *
     * @param mutator The hook to call. Null is safe.
     */
    public void addAttachmentLinkHook( StringTransmutator mutator )
    {
        if( mutator != null )
        {
            m_attachmentLinkMutatorChain.add( mutator );
        }
    }

    /**
     * Adds a HeadingListener to the parser chain. It will be called whenever
     * a parsed header is found.
     *
     * @param listener The listener to add.
     */
    public void addHeadingListener( HeadingListener listener )
    {
        if( listener != null )
        {
            m_headingListenerChain.add( listener );
        }
    }

    /**
     * Disables access rule parsing.
     */
    public void disableAccessRules()
    {
        m_parseAccessRules = false;
    }

    /**
     * Returns whether access rules embedded in the page will be parsed.
     *
     * @return true unless {@link #disableAccessRules()} has been called.
     */
    public boolean isParseAccessRules()
    {
        return m_parseAccessRules;
    }

    /**
     * Use this to turn on or off image inlining.
     * @param toggle If true, images are inlined (as per set in jspwiki.properties)
     *               If false, then images won't be inlined; instead, they will be
     *               treated as standard hyperlinks.
     * @since 2.2.9
     */
    public void enableImageInlining( boolean toggle )
    {
        m_inlineImages = toggle;
    }

    /**
     * Returns whether image inlining is currently enabled.
     *
     * @return true if images are inlined (the default).
     */
    public boolean isImageInlining() {
        return m_inlineImages;
    }

    /**
     * Lazily compiles the configured inline-image glob patterns and stores the
     * compiled list both on the engine (shared cache) and in this parser.
     * Malformed patterns are logged and skipped.
     */
    // NOTE(review): lazy initialization is not synchronized — assumes a parser
    // instance is used from a single thread; concurrent first calls could compile
    // the patterns more than once (harmless but wasteful). Confirm usage.
    @SuppressWarnings( "unchecked" )
    protected final void initInlineImagePatterns() {
        PatternCompiler compiler = new GlobCompiler();
        //
        //  We cache compiled patterns in the engine, since their creation is really expensive
        //
        List< Pattern > compiledpatterns = ( List< Pattern > )m_engine.getAttribute( INLINE_IMAGE_PATTERNS );
        if( compiledpatterns == null ) {
            compiledpatterns = new ArrayList< Pattern >( 20 );
            Collection< String > ptrns = m_engine.getAllInlinedImagePatterns();
            //
            //  Make them into Regexp Patterns.  Unknown patterns are ignored.
            //
            for( Iterator< String > i = ptrns.iterator(); i.hasNext(); ) {
                String pattern = i.next();
                try {
                    compiledpatterns.add( compiler.compile( pattern,
                                                            GlobCompiler.DEFAULT_MASK | GlobCompiler.READ_ONLY_MASK ) );
                } catch( MalformedPatternException e ) {
                    log.error( "Malformed pattern [" + pattern + "] in properties: ", e );
                }
            }
            m_engine.setAttribute( INLINE_IMAGE_PATTERNS, compiledpatterns );
        }
        // Expose a read-only view so callers cannot mutate the shared cache.
        m_inlineImagePatterns = Collections.unmodifiableList( compiledpatterns );
    }

    /**
     * Returns the compiled inline-image patterns, initializing them on first use.
     *
     * @return an unmodifiable list of compiled glob Patterns.
     */
    public List< Pattern > getInlineImagePatterns() {
        if( m_inlineImagePatterns == null ) {
            initInlineImagePatterns();
        }
        return m_inlineImagePatterns;
    }

    /**
     * Parses the document.
     * @return the parsed document, as a WikiDocument
     * @throws IOException If something goes wrong.
     */
    public abstract WikiDocument parse()
        throws IOException;

    /**
     * Return the current position in the reader stream.
     * The value will be -1 prior to reading.
     * @return the reader position as an int.
     */
    public int getPosition()
    {
        return m_pos;
    }

    /**
     * Returns the next token in the stream. This is the most called method
     * in the entire parser, so it needs to be lean and mean.
     *
     * @return The next token in the stream; or, if the stream is ended, -1.
     * @throws IOException If something bad happens
     * @throws NullPointerException If you have not yet created an input document.
     */
    protected final int nextToken()
        throws IOException, NullPointerException
    {
        // if( m_in == null ) return -1;
        m_pos++;
        return m_in.read();
    }

    /**
     * Push back any character to the current input. Does not
     * push back a read EOF, though.
     *
     * @param c Character to push back.
     * @throws IOException In case the character cannot be pushed back.
     */
    protected void pushBack( int c )
        throws IOException
    {
        if( c != -1 && m_in != null )
        {
            // Keep m_pos in sync with the stream position.
            m_pos--;
            m_in.unread( c );
        }
    }

    /**
     * Writes HTML for error message. Does not add it to the document, you
     * have to do it yourself.
     *
     * @param error The error string.
     * @return An Element containing the error.
     */
    public static Element makeError( String error )
    {
        return new Element("span").setAttribute("class","error").addContent(error);
    }

    /**
     * Cleans a Wiki name. The functionality of this method was changed in 2.6
     * so that the list of allowed characters is much larger. Use wikifyLink()
     * to get the legacy behaviour.
     * <P>
     * [ This is a link ] -&gt; This is a link
     *
     * @param link Link to be cleared. Null is safe, and causes this to return null.
     * @return A cleaned link.
     *
     * @since 2.0
     */
    public static String cleanLink( String link )
    {
        return cleanLink(link, PUNCTUATION_CHARS_ALLOWED);
    }

    /**
     * Cleans a Wiki name based on a list of characters. Also, any multiple
     * whitespace is collapsed into a single space, and any leading or trailing
     * space is removed.
     *
     * @param link Link to be cleared. Null is safe, and causes this to return null.
     * @param allowedChars Characters which are allowed in the string.
     * @return A cleaned link.
     *
     * @since 2.6
     */
    public static String cleanLink( String link, String allowedChars )
    {
        if( link == null ) return null;
        link = link.trim();
        StringBuilder clean = new StringBuilder(link.length());
        //
        //  Remove non-alphanumeric characters that should not
        //  be put inside WikiNames.  Note that all valid
        //  Unicode letters are considered okay for WikiNames.
        //  It is the problem of the WikiPageProvider to take
        //  care of actually storing that information.
        //
        //  Also capitalize things, if necessary.
        //
        boolean isWord = true; // If true, we've just crossed a word boundary
        boolean wasSpace = false;
        for( int i = 0; i < link.length(); i++ )
        {
            char ch = link.charAt(i);
            //
            //  Cleans away repetitive whitespace and only uses the first one.
            //
            if( Character.isWhitespace(ch) )
            {
                if( wasSpace )
                    continue;
                wasSpace = true;
            }
            else
            {
                wasSpace = false;
            }
            //
            //  Check if it is allowed to use this char, and capitalize, if necessary.
            //
            if( Character.isLetterOrDigit( ch ) || allowedChars.indexOf(ch) != -1 )
            {
                // Is a letter
                if( isWord ) ch = Character.toUpperCase( ch );
                clean.append( ch );
                isWord = false;
            }
            else
            {
                // Disallowed char: dropped, and the next allowed char starts a new word.
                isWord = true;
            }
        }
        return clean.toString();
    }

    /**
     * Cleans away extra legacy characters. This method functions exactly
     * like pre-2.6 cleanLink()
     * <P>
     * [ This is a link ] -&gt; ThisIsALink
     *
     * @param link Link to be cleared. Null is safe, and causes this to return null.
     * @return A cleaned link.
     * @since 2.6
     */
    public static String wikifyLink(String link)
    {
        return MarkupParser.cleanLink(link, MarkupParser.LEGACY_CHARS_ALLOWED);
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ecs.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Container for the parameters to the {@link com.amazonaws.services.ecs.AmazonECS#describeTasks(DescribeTasksRequest) DescribeTasks operation}.
 * <p>
 * Describes a specified task or tasks.
 * </p>
 *
 * @see com.amazonaws.services.ecs.AmazonECS#describeTasks(DescribeTasksRequest)
 */
// NOTE(review): generated AWS SDK model class — keep structure in generated form.
public class DescribeTasksRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * The short name or full Amazon Resource Name (ARN) of the cluster that
     * hosts the task you want to describe. If you do not specify a cluster,
     * the default cluster is assumed.
     */
    private String cluster;

    /**
     * A space-separated list of task UUIDs or full Amazon Resource Name
     * (ARN) entries.
     */
    private com.amazonaws.internal.ListWithAutoConstructFlag<String> tasks;

    /**
     * The short name or full Amazon Resource Name (ARN) of the cluster that
     * hosts the task you want to describe. If you do not specify a cluster,
     * the default cluster is assumed.
     *
     * @return The short name or full Amazon Resource Name (ARN) of the cluster that
     *         hosts the task you want to describe. If you do not specify a cluster,
     *         the default cluster is assumed.
     */
    public String getCluster() {
        return cluster;
    }

    /**
     * The short name or full Amazon Resource Name (ARN) of the cluster that
     * hosts the task you want to describe. If you do not specify a cluster,
     * the default cluster is assumed.
     *
     * @param cluster The short name or full Amazon Resource Name (ARN) of the cluster that
     *                hosts the task you want to describe. If you do not specify a cluster,
     *                the default cluster is assumed.
     */
    public void setCluster(String cluster) {
        this.cluster = cluster;
    }

    /**
     * The short name or full Amazon Resource Name (ARN) of the cluster that
     * hosts the task you want to describe. If you do not specify a cluster,
     * the default cluster is assumed.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param cluster The short name or full Amazon Resource Name (ARN) of the cluster that
     *                hosts the task you want to describe. If you do not specify a cluster,
     *                the default cluster is assumed.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public DescribeTasksRequest withCluster(String cluster) {
        this.cluster = cluster;
        return this;
    }

    /**
     * A space-separated list of task UUIDs or full Amazon Resource Name
     * (ARN) entries.
     *
     * @return A space-separated list of task UUIDs or full Amazon Resource Name
     *         (ARN) entries.
     */
    public java.util.List<String> getTasks() {
        // Lazily auto-construct an empty list so callers never observe null here.
        if (tasks == null) {
              tasks = new com.amazonaws.internal.ListWithAutoConstructFlag<String>();
              tasks.setAutoConstruct(true);
        }
        return tasks;
    }

    /**
     * A space-separated list of task UUIDs or full Amazon Resource Name
     * (ARN) entries.
     *
     * @param tasks A space-separated list of task UUIDs or full Amazon Resource Name
     *              (ARN) entries.
     */
    public void setTasks(java.util.Collection<String> tasks) {
        if (tasks == null) {
            this.tasks = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection has no effect.
        com.amazonaws.internal.ListWithAutoConstructFlag<String> tasksCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(tasks.size());
        tasksCopy.addAll(tasks);
        this.tasks = tasksCopy;
    }

    /**
     * A space-separated list of task UUIDs or full Amazon Resource Name
     * (ARN) entries.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setTasks(java.util.Collection)} or {@link
     * #withTasks(java.util.Collection)} if you want to override the existing
     * values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param tasks A space-separated list of task UUIDs or full Amazon Resource Name
     *              (ARN) entries.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public DescribeTasksRequest withTasks(String... tasks) {
        // Seed an empty list (presized to the varargs) if none exists yet, then append.
        if (getTasks() == null) setTasks(new java.util.ArrayList<String>(tasks.length));
        for (String value : tasks) {
            getTasks().add(value);
        }
        return this;
    }

    /**
     * A space-separated list of task UUIDs or full Amazon Resource Name
     * (ARN) entries.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param tasks A space-separated list of task UUIDs or full Amazon Resource Name
     *              (ARN) entries.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public DescribeTasksRequest withTasks(java.util.Collection<String> tasks) {
        if (tasks == null) {
            this.tasks = null;
        } else {
            // Defensive copy, mirroring setTasks.
            com.amazonaws.internal.ListWithAutoConstructFlag<String> tasksCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(tasks.size());
            tasksCopy.addAll(tasks);
            this.tasks = tasksCopy;
        }
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCluster() != null) sb.append("Cluster: " + getCluster() + ",");
        if (getTasks() != null) sb.append("Tasks: " + getTasks() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getCluster() == null) ? 0 : getCluster().hashCode());
        hashCode = prime * hashCode + ((getTasks() == null) ? 0 : getTasks().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (obj instanceof DescribeTasksRequest == false) return false;
        DescribeTasksRequest other = (DescribeTasksRequest)obj;
        // XOR detects the "exactly one side null" case; the second check handles value inequality.
        if (other.getCluster() == null ^ this.getCluster() == null) return false;
        if (other.getCluster() != null && other.getCluster().equals(this.getCluster()) == false) return false;
        if (other.getTasks() == null ^ this.getTasks() == null) return false;
        if (other.getTasks() != null && other.getTasks().equals(this.getTasks()) == false) return false;
        return true;
    }

    @Override
    public DescribeTasksRequest clone() {
        return (DescribeTasksRequest) super.clone();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.bugs;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.Iterator;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
import javax.jms.DeliveryMode;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.jms.TextMessage;
import javax.jms.Topic;
import org.apache.activemq.ActiveMQConnection;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.ActiveMQSession;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.region.DurableTopicSubscription;
import org.apache.activemq.broker.region.RegionBroker;
import org.apache.activemq.broker.region.Subscription;
import org.apache.activemq.broker.region.TopicRegion;
import org.apache.activemq.broker.region.policy.PolicyEntry;
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.apache.activemq.command.ConsumerInfo;
import org.apache.activemq.store.kahadb.KahaDBPersistenceAdapter;
import org.apache.activemq.util.SubscriptionKey;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Test for AMQ-4062: verifies that the durable-topic prefetch size configured
 * through a {@link PolicyEntry} is applied to a recovered durable subscription
 * after the broker is restarted against the same persistence store.
 */
public class AMQ4062Test {

    private BrokerService service;
    private PolicyEntry policy;
    private ConcurrentMap<SubscriptionKey, DurableTopicSubscription> durableSubscriptions;

    /** Prefetch configured via the broker policy; the test asserts it survives restart. */
    private static final int PREFETCH_SIZE_5 = 5;

    private String connectionUri;

    /**
     * Creates and starts a fresh broker, wiping any previously persisted messages.
     *
     * @throws Exception if the broker fails to start.
     */
    @Before
    public void startBroker() throws IOException, Exception {
        createAndStartBroker(true, true);
    }

    /**
     * Restarts the broker over the existing persistence directory so that the
     * durable subscription created earlier is recovered from storage. Unlike
     * {@link #startBroker()}, durable subs are not kept active while offline.
     *
     * @throws Exception if the broker fails to start.
     */
    public void restartBroker() throws IOException, Exception {
        createAndStartBroker(false, false);
    }

    /**
     * Shared broker construction for the start/restart paths.
     *
     * @param deleteAllMessagesOnStartup when true, the persistence store is cleared at startup.
     * @param keepDurableSubsActive when false, durable subscriptions are deactivated while consumers are offline.
     */
    private void createAndStartBroker(boolean deleteAllMessagesOnStartup, boolean keepDurableSubsActive) throws Exception {
        service = new BrokerService();
        service.setPersistent(true);
        if (deleteAllMessagesOnStartup) {
            service.setDeleteAllMessagesOnStartup(true);
        }
        service.setUseJmx(false);
        if (!keepDurableSubsActive) {
            service.setKeepDurableSubsActive(false);
        }

        // KahaDB in a fixed directory so the restart path recovers the same store.
        KahaDBPersistenceAdapter pa = new KahaDBPersistenceAdapter();
        File dataFile = new File("createData");
        pa.setDirectory(dataFile);
        pa.setJournalMaxFileLength(1024 * 1024 * 32);
        service.setPersistenceAdapter(pa);

        // Default policy: prefetch of PREFETCH_SIZE_5 for every durable topic consumer.
        policy = new PolicyEntry();
        policy.setTopic(">");
        policy.setDurableTopicPrefetch(PREFETCH_SIZE_5);
        PolicyMap pMap = new PolicyMap();
        pMap.setDefaultEntry(policy);
        service.setDestinationPolicy(pMap);

        service.addConnector("tcp://localhost:0");
        service.start();
        service.waitUntilStarted();
        connectionUri = service.getTransportConnectors().get(0).getPublishableConnectString();
    }

    /** Stops the broker and waits for a clean shutdown. */
    @After
    public void stopBroker() throws Exception {
        service.stop();
        service.waitUntilStopped();
        service = null;
    }

    /**
     * Creates a durable subscriber, checks its prefetch, then restarts the broker
     * and verifies the policy-configured prefetch is applied to the recovered
     * subscription as well. (Method name kept for history; "Dirable" is a typo
     * for "Durable" — renaming would change the externally visible test id.)
     */
    @Test
    public void testDirableSubPrefetchRecovered() throws Exception {
        PrefetchConsumer consumer = new PrefetchConsumer(true, connectionUri);
        consumer.receive();
        durableSubscriptions = getDurableSubscriptions();
        ConsumerInfo info = getConsumerInfo(durableSubscriptions);

        // check if the prefetchSize equals to the size we set in the PolicyEntry
        assertEquals(PREFETCH_SIZE_5, info.getPrefetchSize());

        // Release the listener so it can ack and close its connection.
        consumer.a.countDown();
        Producer p = new Producer(connectionUri);
        p.send();
        p = null;

        service.stop();
        service.waitUntilStopped();
        durableSubscriptions = null;
        consumer = null;

        stopBroker();
        restartBroker();

        getDurableSubscriptions();
        info = null;
        info = getConsumerInfo(durableSubscriptions);

        consumer = new PrefetchConsumer(false, connectionUri);
        consumer.receive();
        consumer.a.countDown();
        info = null;
        info = getConsumerInfo(durableSubscriptions);

        // The recovered subscription must still honor the PolicyEntry's
        // durableTopicPrefetch of PREFETCH_SIZE_5, not the default (100).
        assertEquals(5, info.getPrefetchSize());
    }

    /**
     * Reaches into the broker's TopicRegion via reflection to grab its private
     * durable-subscription map; cached across calls within one broker lifetime.
     */
    @SuppressWarnings("unchecked")
    private ConcurrentMap<SubscriptionKey, DurableTopicSubscription> getDurableSubscriptions() throws NoSuchFieldException, IllegalAccessException {
        if (durableSubscriptions != null)
            return durableSubscriptions;
        RegionBroker regionBroker = (RegionBroker) service.getRegionBroker();
        TopicRegion region = (TopicRegion) regionBroker.getTopicRegion();
        Field field = TopicRegion.class.getDeclaredField("durableSubscriptions");
        field.setAccessible(true);
        durableSubscriptions = (ConcurrentMap<SubscriptionKey, DurableTopicSubscription>) field.get(region);
        return durableSubscriptions;
    }

    /**
     * Finds the ConsumerInfo of the test's durable subscription, or null if absent.
     */
    private ConsumerInfo getConsumerInfo(ConcurrentMap<SubscriptionKey, DurableTopicSubscription> durableSubscriptions) {
        for (Subscription sub : durableSubscriptions.values()) {
            ConsumerInfo info = sub.getConsumerInfo();
            if (info.getSubscriptionName().equals(PrefetchConsumer.SUBSCRIPTION_NAME)) {
                return info;
            }
        }
        return null;
    }

    /**
     * Durable topic subscriber whose listener blocks on a latch; optionally
     * acknowledges and closes once released via {@code a.countDown()}.
     */
    public class PrefetchConsumer implements MessageListener {
        public static final String SUBSCRIPTION_NAME = "A_NAME_ABC_DEF";
        private final String user = ActiveMQConnection.DEFAULT_USER;
        private final String password = ActiveMQConnection.DEFAULT_PASSWORD;
        private final String uri;
        private boolean transacted;
        ActiveMQConnection connection;
        Session session;
        MessageConsumer consumer;
        private boolean needAck = false;
        // Gates onMessage(); released by the test once the prefetch has been inspected.
        CountDownLatch a = new CountDownLatch(1);

        public PrefetchConsumer(boolean needAck, String uri) {
            this.needAck = needAck;
            this.uri = uri;
        }

        /** Opens the connection and registers this object as the durable subscriber's listener. */
        public void receive() throws Exception {
            ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory(user, password, uri);
            connection = (ActiveMQConnection) connectionFactory.createConnection();
            connection.setClientID("3");
            connection.start();
            session = connection.createSession(transacted, Session.CLIENT_ACKNOWLEDGE);
            Destination destination = session.createTopic("topic2");
            consumer = session.createDurableSubscriber((Topic) destination, SUBSCRIPTION_NAME);
            consumer.setMessageListener(this);
        }

        @Override
        public void onMessage(Message message) {
            try {
                a.await();
            }
            catch (InterruptedException e1) {
                // Restore the interrupt flag so callers can observe the interruption.
                Thread.currentThread().interrupt();
            }
            if (needAck) {
                try {
                    message.acknowledge();
                    consumer.close();
                    session.close();
                    connection.close();
                }
                catch (JMSException e) {
                    // Best-effort teardown; failures here must not fail the listener.
                }
            }
        }
    }

    /** Publishes 100 persistent text messages to "topic2". */
    public class Producer {
        protected final String user = ActiveMQConnection.DEFAULT_USER;
        private final String password = ActiveMQConnection.DEFAULT_PASSWORD;
        private final String uri;
        private boolean transacted;

        public Producer(String uri) {
            this.uri = uri;
        }

        public void send() throws Exception {
            ActiveMQConnectionFactory connectionFactory = new ActiveMQConnectionFactory(user, password, uri);
            ActiveMQConnection connection = (ActiveMQConnection) connectionFactory.createConnection();
            connection.start();
            ActiveMQSession session = (ActiveMQSession) connection.createSession(transacted, Session.AUTO_ACKNOWLEDGE);
            Destination destination = session.createTopic("topic2");
            MessageProducer producer = session.createProducer(destination);
            producer.setDeliveryMode(DeliveryMode.PERSISTENT);
            for (int i = 0; i < 100; i++) {
                TextMessage om = session.createTextMessage("hello from producer");
                producer.send(om);
            }
            producer.close();
            session.close();
            connection.close();
        }
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.storage.file.share;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
import com.azure.storage.common.StorageSharedKeyCredential;
import com.azure.storage.file.share.models.CloseHandlesInfo;
import com.azure.storage.file.share.models.ShareDirectoryInfo;
import com.azure.storage.file.share.models.ShareDirectoryProperties;
import com.azure.storage.file.share.models.ShareDirectorySetMetadataInfo;
import com.azure.storage.file.share.models.ShareFileHttpHeaders;
import com.azure.storage.file.share.models.HandleItem;
import com.azure.storage.file.share.models.NtfsFileAttributes;
import com.azure.storage.file.share.sas.ShareFileSasPermission;
import com.azure.storage.file.share.sas.ShareServiceSasSignatureValues;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Map;
/**
* Contains code snippets when generating javadocs through doclets for {@link ShareDirectoryClient}.
*/
public class ShareDirectoryJavaDocCodeSamples {
private String key1 = "key1";
private String value1 = "val1";
    /**
     * Generates code sample for {@link ShareDirectoryClient} instantiation.
     * The BEGIN/END comments delimit the snippet extracted by the codesnippet
     * doclet; the code between them must not be reformatted.
     */
    public void initialization() {
        // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.instantiation
        ShareDirectoryClient client = new ShareFileClientBuilder()
            .connectionString("${connectionString}")
            .endpoint("${endpoint}")
            .buildDirectoryClient();
        // END: com.azure.storage.file.share.ShareDirectoryClient.instantiation
    }
    /**
     * Generates code sample for creating a {@link ShareDirectoryClient} with a SAS token
     * embedded in the endpoint URL.
     *
     * @return An instance of {@link ShareDirectoryClient}
     */
    public ShareDirectoryClient createClientWithSASToken() {
        // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.instantiation.sastoken
        ShareDirectoryClient shareDirectoryClient = new ShareFileClientBuilder()
            .endpoint("https://${accountName}.file.core.windows.net?${SASToken}")
            .shareName("myshare")
            .resourcePath("mydirectory")
            .buildDirectoryClient();
        // END: com.azure.storage.file.share.ShareDirectoryClient.instantiation.sastoken
        return shareDirectoryClient;
    }
    /**
     * Generates code sample for creating a {@link ShareDirectoryClient} with a SAS token
     * supplied via the builder's {@code sasToken} setter.
     * NOTE(review): despite the method name, this sample authenticates with a SAS
     * token rather than a {@link StorageSharedKeyCredential} — confirm intent.
     *
     * @return An instance of {@link ShareDirectoryClient}
     */
    public ShareDirectoryClient createClientWithCredential() {
        // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.instantiation.credential
        ShareDirectoryClient shareDirectoryClient = new ShareFileClientBuilder()
            .endpoint("https://${accountName}.file.core.windows.net")
            .sasToken("${SASTokenQueryParams}")
            .shareName("myshare")
            .resourcePath("mydirectory")
            .buildDirectoryClient();
        // END: com.azure.storage.file.share.ShareDirectoryClient.instantiation.credential
        return shareDirectoryClient;
    }
    /**
     * Generates code sample for creating a {@link ShareDirectoryClient} with {@code connectionString}
     * which turns into {@link StorageSharedKeyCredential}.
     *
     * @return An instance of {@link ShareDirectoryClient}
     */
    public ShareDirectoryClient createClientWithConnectionString() {
        // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.instantiation.connectionstring
        String connectionString = "DefaultEndpointsProtocol=https;AccountName={name};AccountKey={key}"
            + ";EndpointSuffix={core.windows.net}";
        ShareDirectoryClient shareDirectoryClient = new ShareFileClientBuilder()
            .connectionString(connectionString)
            .shareName("myshare")
            .resourcePath("mydirectory")
            .buildDirectoryClient();
        // END: com.azure.storage.file.share.ShareDirectoryClient.instantiation.connectionstring
        return shareDirectoryClient;
    }
    /**
     * Generates a code sample for using {@link ShareDirectoryClient#create()},
     * which creates the directory this client points at.
     */
    public void createDirectory() {
        ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
        // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.createDirectory
        shareDirectoryClient.create();
        System.out.println("Completed creating the directory. ");
        // END: com.azure.storage.file.share.ShareDirectoryClient.createDirectory
    }
    /**
     * Generates a code sample for using {@link ShareDirectoryClient#createWithResponse(FileSmbProperties, String, Map,
     * Duration, Context)}, the overload that also returns the HTTP response details.
     */
    public void createWithResponse() {
        ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
        // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.createWithResponse#FileSmbProperties-String-Map-Duration-Context
        FileSmbProperties smbProperties = new FileSmbProperties();
        String filePermission = "filePermission";
        Response<ShareDirectoryInfo> response = shareDirectoryClient.createWithResponse(smbProperties, filePermission,
            Collections.singletonMap("directory", "metadata"), Duration.ofSeconds(1), new Context(key1, value1));
        System.out.println("Completed creating the directory with status code: " + response.getStatusCode());
        // END: com.azure.storage.file.share.ShareDirectoryClient.createWithResponse#FileSmbProperties-String-Map-Duration-Context
    }
    /**
     * Generates a code sample for using {@link ShareDirectoryClient#createSubdirectory(String)},
     * which creates a child directory under the current one.
     */
    public void createSubdirectory() {
        ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
        // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.createSubdirectory#string
        shareDirectoryClient.createSubdirectory("subdir");
        System.out.println("Completed creating the subdirectory.");
        // END: com.azure.storage.file.share.ShareDirectoryClient.createSubdirectory#string
    }
    /**
     * Generates a code sample for using {@link ShareDirectoryClient#createSubdirectoryWithResponse(String,
     * FileSmbProperties, String, Map, Duration, Context)} — the maximal overload with SMB
     * properties, permission, metadata, timeout and context.
     */
    public void createSubdirectoryMaxOverload() {
        ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
        // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.createSubdirectoryWithResponse#String-FileSmbProperties-String-Map-Duration-Context
        FileSmbProperties smbProperties = new FileSmbProperties();
        String filePermission = "filePermission";
        Response<ShareDirectoryClient> response = shareDirectoryClient.createSubdirectoryWithResponse("subdir",
            smbProperties, filePermission, Collections.singletonMap("directory", "metadata"),
            Duration.ofSeconds(1), new Context(key1, value1));
        System.out.printf("Creating the sub directory completed with status code %d", response.getStatusCode());
        // END: com.azure.storage.file.share.ShareDirectoryClient.createSubdirectoryWithResponse#String-FileSmbProperties-String-Map-Duration-Context
    }
    /**
     * Generates a code sample for using {@link ShareDirectoryClient#createFile(String, long)},
     * creating a 1 KiB file in the directory.
     */
    public void createFile() {
        ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
        // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.createFile#string-long
        ShareFileClient response = shareDirectoryClient.createFile("myfile", 1024);
        System.out.println("Completed creating the file: " + response);
        // END: com.azure.storage.file.share.ShareDirectoryClient.createFile#string-long
    }
    /**
     * Generates a code sample for using {@link ShareDirectoryClient#createFileWithResponse(String, long, ShareFileHttpHeaders,
     * FileSmbProperties, String, Map, Duration, Context)} — the maximal overload with HTTP
     * headers, SMB properties, permission, metadata, timeout and context.
     */
    public void createFileMaxOverload() {
        ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
        // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.createFile#com.azure.storage.file.share.ShareDirectoryAsyncClient.createFileWithResponse#String-long-ShareFileHttpHeaders-FileSmbProperties-String-Map-duration-context
        ShareFileHttpHeaders httpHeaders = new ShareFileHttpHeaders()
            .setContentType("text/html")
            .setContentEncoding("gzip")
            .setContentLanguage("en")
            .setCacheControl("no-transform")
            .setContentDisposition("attachment");
        FileSmbProperties smbProperties = new FileSmbProperties()
            .setNtfsFileAttributes(EnumSet.of(NtfsFileAttributes.READ_ONLY))
            .setFileCreationTime(OffsetDateTime.now())
            .setFileLastWriteTime(OffsetDateTime.now())
            .setFilePermissionKey("filePermissionKey");
        String filePermission = "filePermission";
        // NOTE: filePermission and filePermissionKey should never be both set
        Response<ShareFileClient> response = shareDirectoryClient.createFileWithResponse("myFile", 1024,
            httpHeaders, smbProperties, filePermission, Collections.singletonMap("directory", "metadata"),
            Duration.ofSeconds(1), new Context(key1, value1));
        System.out.println("Completed creating the file with status code: " + response.getStatusCode());
        // END: com.azure.storage.file.share.ShareDirectoryClient.createFile#com.azure.storage.file.share.ShareDirectoryAsyncClient.createFileWithResponse#String-long-ShareFileHttpHeaders-FileSmbProperties-String-Map-duration-context
    }
/**
 * Generates a code sample for using {@link ShareDirectoryClient#listFilesAndDirectories()}.
 */
public void listDirectoriesAndFiles() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.listFilesAndDirectories
    shareDirectoryClient.listFilesAndDirectories().forEach(
        fileRef -> System.out.printf("Is the resource a directory? %b. The resource name is: %s.",
            fileRef.isDirectory(), fileRef.getName())
    );
    // END: com.azure.storage.file.share.ShareDirectoryClient.listFilesAndDirectories
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#listFilesAndDirectories(String, Integer, Duration,
 * Context)}.
 */
public void listDirectoriesAndFilesMaxOverload() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.listFilesAndDirectories#string-integer-duration-context
    shareDirectoryClient.listFilesAndDirectories("subdir", 10, Duration.ofSeconds(1),
        new Context(key1, value1)).forEach(
            fileRef -> System.out.printf("Is the resource a directory? %b. The resource name is: %s.",
                fileRef.isDirectory(), fileRef.getName())
    );
    // END: com.azure.storage.file.share.ShareDirectoryClient.listFilesAndDirectories#string-integer-duration-context
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#deleteFile(String)}.
 */
public void deleteFile() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.deleteFile#string
    shareDirectoryClient.deleteFile("myfile");
    System.out.println("Completed deleting the file.");
    // END: com.azure.storage.file.share.ShareDirectoryClient.deleteFile#string
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#deleteFileWithResponse(String, Duration, Context)}.
 */
public void deleteFileWithResponse() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.deleteFileWithResponse#string-duration-context
    Response<Void> response = shareDirectoryClient.deleteFileWithResponse("myfile",
        Duration.ofSeconds(1), new Context(key1, value1));
    System.out.println("Completed deleting the file with status code: " + response.getStatusCode());
    // END: com.azure.storage.file.share.ShareDirectoryClient.deleteFileWithResponse#string-duration-context
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#deleteSubdirectory(String)}.
 */
public void deleteSubdirectory() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.deleteSubdirectory#string
    shareDirectoryClient.deleteSubdirectory("mysubdirectory");
    // Fixed sample output: previously read "Complete deleting ..." (ungrammatical).
    System.out.println("Completed deleting the subdirectory.");
    // END: com.azure.storage.file.share.ShareDirectoryClient.deleteSubdirectory#string
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#deleteSubdirectoryWithResponse(String, Duration,
 * Context)}.
 */
public void deleteSubdirectoryWithResponse() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.deleteSubdirectoryWithResponse#string-duration-context
    Response<Void> response = shareDirectoryClient.deleteSubdirectoryWithResponse("mysubdirectory",
        Duration.ofSeconds(1), new Context(key1, value1));
    System.out.println("Completed deleting the subdirectory with status code: " + response.getStatusCode());
    // END: com.azure.storage.file.share.ShareDirectoryClient.deleteSubdirectoryWithResponse#string-duration-context
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#delete()}.
 */
public void deleteDirectory() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.delete
    shareDirectoryClient.delete();
    // Fixed sample output: previously claimed a *file* was deleted, but delete() removes the directory.
    System.out.println("Completed deleting the directory.");
    // END: com.azure.storage.file.share.ShareDirectoryClient.delete
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#deleteWithResponse(Duration, Context)}.
 */
public void deleteWithResponse() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.deleteWithResponse#duration-context
    Response<Void> response = shareDirectoryClient.deleteWithResponse(Duration.ofSeconds(1), new Context(key1, value1));
    // Fixed sample output: previously claimed a *file* was deleted, but this deletes the directory.
    System.out.println("Completed deleting the directory with status code: " + response.getStatusCode());
    // END: com.azure.storage.file.share.ShareDirectoryClient.deleteWithResponse#duration-context
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#getProperties()}.
 */
public void getProperties() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.getProperties
    ShareDirectoryProperties response = shareDirectoryClient.getProperties();
    System.out.printf("Directory latest modified date is %s.", response.getLastModified());
    // END: com.azure.storage.file.share.ShareDirectoryClient.getProperties
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#getPropertiesWithResponse(Duration, Context)}.
 */
public void getPropertiesWithResponse() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.getPropertiesWithResponse#duration-Context
    Response<ShareDirectoryProperties> response = shareDirectoryClient.getPropertiesWithResponse(
        Duration.ofSeconds(1), new Context(key1, value1));
    System.out.printf("Directory latest modified date is %s.", response.getValue().getLastModified());
    // END: com.azure.storage.file.share.ShareDirectoryClient.getPropertiesWithResponse#duration-Context
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#setProperties(FileSmbProperties, String)}.
 */
public void setProperties() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.setProperties#FileSmbProperties-String
    FileSmbProperties smbProperties = new FileSmbProperties();
    String filePermission = "filePermission";
    ShareDirectoryInfo response = shareDirectoryClient.setProperties(smbProperties, filePermission);
    System.out.printf("Directory latest modified date is %s.", response.getLastModified());
    // END: com.azure.storage.file.share.ShareDirectoryClient.setProperties#FileSmbProperties-String
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#setPropertiesWithResponse(FileSmbProperties, String,
 * Duration, Context)}.
 */
public void setPropertiesWithResponse() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.setPropertiesWithResponse#FileSmbProperties-String-Duration-Context
    FileSmbProperties smbProperties = new FileSmbProperties();
    String filePermission = "filePermission";
    Response<ShareDirectoryInfo> response = shareDirectoryClient.setPropertiesWithResponse(smbProperties, filePermission,
        Duration.ofSeconds(1), new Context(key1, value1));
    System.out.printf("Directory latest modified date is %s.", response.getValue().getLastModified());
    // END: com.azure.storage.file.share.ShareDirectoryClient.setPropertiesWithResponse#FileSmbProperties-String-Duration-Context
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#setMetadata(Map)}.
 */
public void setMetadata() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.setMetadata#map
    ShareDirectorySetMetadataInfo response =
        shareDirectoryClient.setMetadata(Collections.singletonMap("directory", "updatedMetadata"));
    System.out.printf("Setting the directory metadata completed with updated etag %s", response.getETag());
    // END: com.azure.storage.file.share.ShareDirectoryClient.setMetadata#map
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#setMetadataWithResponse(Map, Duration, Context)}.
 */
public void setMetadataWithResponse() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.setMetadataWithResponse#map-duration-context
    Response<ShareDirectorySetMetadataInfo> response =
        shareDirectoryClient.setMetadataWithResponse(Collections.singletonMap("directory", "updatedMetadata"),
            Duration.ofSeconds(1), new Context(key1, value1));
    // Fixed sample output: previously said "updated etag %d" while printing the HTTP status code.
    System.out.printf("Setting the directory metadata completed with status code %d", response.getStatusCode());
    // END: com.azure.storage.file.share.ShareDirectoryClient.setMetadataWithResponse#map-duration-context
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#setMetadata(Map)} to clear the metadata.
 */
public void clearSetMetadata() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.setMetadata#map.clearMetadata
    // Passing null as the metadata map clears all metadata on the directory.
    ShareDirectorySetMetadataInfo response = shareDirectoryClient.setMetadata(null);
    System.out.printf("Cleared metadata.");
    // END: com.azure.storage.file.share.ShareDirectoryClient.setMetadata#map.clearMetadata
}
/**
 * Generates a code sample for using
 * {@link ShareDirectoryClient#setMetadataWithResponse(Map, Duration, Context)} to clear the metadata.
 */
public void clearMetadata() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.setMetadataWithResponse#map-duration-context.clearMetadata
    // Passing null as the metadata map clears all metadata on the directory.
    Response<ShareDirectorySetMetadataInfo> response = shareDirectoryClient.setMetadataWithResponse(null,
        Duration.ofSeconds(1), new Context(key1, value1));
    // Fixed sample output: previously said "latest modified date is %s" while printing the status code.
    System.out.printf("Clearing the directory metadata completed with status code %d", response.getStatusCode());
    // END: com.azure.storage.file.share.ShareDirectoryClient.setMetadataWithResponse#map-duration-context.clearMetadata
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#listHandles(Integer, boolean, Duration, Context)}.
 */
public void listHandles() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.listHandles#Integer-boolean-duration-context
    Iterable<HandleItem> result = shareDirectoryClient.listHandles(10, true, Duration.ofSeconds(1),
        new Context(key1, value1));
    System.out.printf("Get handles completed with handle id %s", result.iterator().next().getHandleId());
    // END: com.azure.storage.file.share.ShareDirectoryClient.listHandles#Integer-boolean-duration-context
}
/**
 * Code snippet for {@link ShareDirectoryClient#forceCloseHandle(String)}.
 */
public void forceCloseHandle() {
    ShareDirectoryClient shareDirectoryClient = createClientWithConnectionString();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.forceCloseHandle#String
    shareDirectoryClient.listHandles(null, true, Duration.ofSeconds(30), Context.NONE).forEach(handleItem -> {
        shareDirectoryClient.forceCloseHandle(handleItem.getHandleId());
        System.out.printf("Closed handle %s on resource %s%n", handleItem.getHandleId(), handleItem.getPath());
    });
    // END: com.azure.storage.file.share.ShareDirectoryClient.forceCloseHandle#String
}
/**
 * Code snippet for {@link ShareDirectoryClient#forceCloseHandleWithResponse(String, Duration, Context)}.
 */
public void forceCloseHandleWithResponse() {
    ShareDirectoryClient shareDirectoryClient = createClientWithConnectionString();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.forceCloseHandleWithResponse#String-Duration-Context
    shareDirectoryClient.listHandles(null, true, Duration.ofSeconds(30), Context.NONE).forEach(handleItem -> {
        Response<CloseHandlesInfo> closeResponse = shareDirectoryClient.forceCloseHandleWithResponse(
            handleItem.getHandleId(), Duration.ofSeconds(30), Context.NONE);
        System.out.printf("Closing handle %s on resource %s completed with status code %d%n",
            handleItem.getHandleId(), handleItem.getPath(), closeResponse.getStatusCode());
    });
    // END: com.azure.storage.file.share.ShareDirectoryClient.forceCloseHandleWithResponse#String-Duration-Context
}
/**
 * Code snippet for {@link ShareDirectoryClient#forceCloseAllHandles(boolean, Duration, Context)}.
 */
public void forceCloseAllHandles() {
    ShareDirectoryClient shareDirectoryClient = createClientWithConnectionString();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.forceCloseAllHandles#boolean-Duration-Context
    CloseHandlesInfo closeHandlesInfo = shareDirectoryClient.forceCloseAllHandles(true, Duration.ofSeconds(30),
        Context.NONE);
    System.out.printf("Closed %d open handles on the directory%n", closeHandlesInfo.getClosedHandles());
    // END: com.azure.storage.file.share.ShareDirectoryClient.forceCloseAllHandles#boolean-Duration-Context
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#getShareSnapshotId()}.
 */
public void getShareSnapshotId() {
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.getShareSnapshotId
    OffsetDateTime currentTime = OffsetDateTime.of(LocalDateTime.now(), ZoneOffset.UTC);
    ShareDirectoryClient shareDirectoryClient = new ShareFileClientBuilder()
        .endpoint("https://${accountName}.file.core.windows.net")
        .sasToken("${SASToken}")
        .shareName("myshare")
        .resourcePath("mydirectory")
        .snapshot(currentTime.toString())
        .buildDirectoryClient();
    System.out.printf("Snapshot ID: %s%n", shareDirectoryClient.getShareSnapshotId());
    // END: com.azure.storage.file.share.ShareDirectoryClient.getShareSnapshotId
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#getShareName()}.
 */
public void getShareName() {
    // NOTE(review): the local is named "directoryAsyncClient" although this is the sync client --
    // left unchanged here because the name appears inside the published snippet region.
    ShareDirectoryClient directoryAsyncClient = createClientWithSASToken();
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.getShareName
    String shareName = directoryAsyncClient.getShareName();
    System.out.println("The share name of the directory is " + shareName);
    // END: com.azure.storage.file.share.ShareDirectoryClient.getShareName
}
/**
 * Generates a code sample for using {@link ShareDirectoryClient#getDirectoryPath()}.
 */
public void getDirectoryPath() {
    ShareDirectoryClient shareDirectoryClient = createClientWithSASToken();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.getDirectoryPath
    String directoryPath = shareDirectoryClient.getDirectoryPath();
    System.out.println("The name of the directory is " + directoryPath);
    // END: com.azure.storage.file.share.ShareDirectoryClient.getDirectoryPath
}
/**
 * Code snippet for {@link ShareDirectoryClient#generateSas(ShareServiceSasSignatureValues)}.
 */
public void generateSas() {
    ShareDirectoryClient shareDirectoryClient = createClientWithCredential();
    // Snippet region extracted into the published Javadoc by the codesnippet tooling.
    // BEGIN: com.azure.storage.file.share.ShareDirectoryClient.generateSas#ShareServiceSasSignatureValues
    OffsetDateTime expiryTime = OffsetDateTime.now().plusDays(1);
    ShareFileSasPermission permission = new ShareFileSasPermission().setReadPermission(true);
    ShareServiceSasSignatureValues values = new ShareServiceSasSignatureValues(expiryTime, permission)
        .setStartTime(OffsetDateTime.now());
    shareDirectoryClient.generateSas(values); // Client must be authenticated via StorageSharedKeyCredential
    // END: com.azure.storage.file.share.ShareDirectoryClient.generateSas#ShareServiceSasSignatureValues
}
}
| |
/*
* Copyright 2015 Lithium Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lithium.flow.config;
import static com.google.common.base.Preconditions.checkNotNull;
import com.lithium.flow.config.exception.IllegalConfigException;
import com.lithium.flow.config.loaders.ClasspathConfigLoader;
import com.lithium.flow.config.loaders.FileConfigLoader;
import com.lithium.flow.config.parsers.AppendConfigParser;
import com.lithium.flow.config.parsers.CommentConfigParser;
import com.lithium.flow.config.parsers.EqualsConfigParser;
import com.lithium.flow.config.parsers.IncludeConfigParser;
import com.lithium.flow.config.parsers.LoaderConfigParser;
import com.lithium.flow.config.parsers.RequiredConfigParser;
import com.lithium.flow.config.parsers.SetNullConfigParser;
import com.lithium.flow.config.parsers.SubtractConfigParser;
import com.lithium.flow.store.MemoryStore;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * Default {@link ConfigBuilder} implementation. Accumulates raw key/value pairs (in insertion
 * order) from direct calls and from included config files. Included files are located via a list
 * of pluggable {@link ConfigLoader}s and interpreted line-by-line by a list of pluggable
 * {@link ConfigParser}s; the first parser that accepts a line wins, so parser order matters.
 *
 * @author Matt Ayres
 */
public class BaseConfigBuilder implements ConfigBuilder {
	// Raw key -> value pairs; LinkedHashMap preserves definition order in the built config.
	private final Map<String, String> rawMap = new LinkedHashMap<>();
	// Keys registered via requireKey(); validated in build() when allowRequiredKeys is set.
	private final Set<String> requiredKeys = new HashSet<>();
	// Stack of include paths currently being processed, used to detect include recursion.
	private final Deque<String> pathDeque = new ArrayDeque<>();
	// Stack of key prefixes maintained by pushPrefix()/popPrefix().
	private final Deque<String> prefixDeque = new ArrayDeque<>();
	private final List<ConfigLoader> loaders = new ArrayList<>();
	private final List<ConfigParser> parsers = new ArrayList<>();
	private final List<ConfigWatcher> watchers = new ArrayList<>();
	private String name = "unknown";
	private boolean allowFileNotFound;
	private boolean allowUndefined;
	private boolean allowRequiredKeys;
	// Optional fallback config consulted for keys not set on this builder; may be null.
	private final Config defaults;

	public BaseConfigBuilder() {
		this(null);
	}

	/**
	 * @param defaults optional config consulted for keys not set on this builder, may be null
	 */
	public BaseConfigBuilder(@Nullable Config defaults) {
		this.defaults = defaults;
		loaders.add(new FileConfigLoader());
		loaders.add(new ClasspathConfigLoader());
		// Parser order matters: the comment parser must run first and the plain key=value
		// parser is the catch-all at the end.
		parsers.add(new CommentConfigParser());
		parsers.add(new IncludeConfigParser());
		parsers.add(new LoaderConfigParser(this::addLoader));
		parsers.add(new AppendConfigParser());
		parsers.add(new SubtractConfigParser());
		parsers.add(new SetNullConfigParser());
		parsers.add(new RequiredConfigParser());
		parsers.add(new EqualsConfigParser());
	}

	@Override
	@Nonnull
	public ConfigBuilder pushPrefix(@Nonnull String prefix) {
		prefixDeque.push(checkNotNull(prefix));
		return this;
	}

	@Override
	@Nonnull
	public ConfigBuilder popPrefix() {
		prefixDeque.pop();
		return this;
	}

	/**
	 * @return the key with every active prefix prepended, each followed by a '.'
	 */
	@Nonnull
	private String getPrefixed(@Nonnull String key) {
		StringBuilder sb = new StringBuilder();
		prefixDeque.forEach(prefix -> sb.append(prefix).append('.'));
		sb.append(key);
		return sb.toString();
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder addLoader(@Nonnull ConfigLoader loader) {
		loaders.add(checkNotNull(loader));
		return this;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder resetLoaders() {
		loaders.clear();
		return this;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder addParser(@Nonnull ConfigParser parser) {
		parsers.add(checkNotNull(parser));
		return this;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder resetParsers() {
		parsers.clear();
		return this;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder addWatcher(@Nonnull ConfigWatcher watcher) {
		watchers.add(checkNotNull(watcher));
		watcher.onStart(this);
		return this;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder allowFileNotFound(boolean allowFileNotFound) {
		this.allowFileNotFound = allowFileNotFound;
		return this;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder allowUndefined(boolean allowUndefined) {
		this.allowUndefined = allowUndefined;
		return this;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder allowRequiredKeys(boolean allowRequiredKeys) {
		this.allowRequiredKeys = allowRequiredKeys;
		return this;
	}

	@Override
	public final boolean allowFileNotFound() {
		return allowFileNotFound;
	}

	@Override
	public final boolean allowUndefined() {
		return allowUndefined;
	}

	@Override
	public final boolean allowRequiredKeys() {
		return allowRequiredKeys;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder setName(@Nonnull String name) {
		this.name = checkNotNull(name);
		return this;
	}

	/**
	 * Builds the config from the accumulated key/value pairs. When required-key enforcement is
	 * enabled, every key registered via {@link #requireKey(String)} must have a non-empty value.
	 *
	 * @throws IllegalConfigException if a required key is missing or empty
	 */
	@Override
	@Nonnull
	public final Config build() {
		Config config = new BaseConfig(name, new MemoryStore(rawMap), defaults, allowUndefined);
		if (allowRequiredKeys) {
			for (String key : requiredKeys) {
				if (config.getString(key, "").isEmpty()) {
					throw new IllegalConfigException(key);
				}
			}
		}
		return config;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder setString(@Nonnull String key, @Nonnull String value) {
		checkNotNull(key);
		checkNotNull(value);
		String prefixedKey = getPrefixed(key);
		rawMap.put(prefixedKey, value);
		watchers.forEach(watcher -> watcher.onSet(prefixedKey, value));
		return this;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder setAll(@Nonnull Map<String, String> configValues) {
		configValues.forEach(this::setString);
		return this;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder addAll(@Nonnull Config config) {
		setAll(config.asRawMap());
		return this;
	}

	@Override
	@Nonnull
	public final BaseConfigBuilder removeKey(@Nonnull String key) {
		checkNotNull(key);
		key = getPrefixed(key);
		rawMap.remove(key);
		return this;
	}

	@Nonnull
	@Override
	public final BaseConfigBuilder requireKey(@Nonnull String key) {
		requiredKeys.add(checkNotNull(key));
		return this;
	}

	@Override
	public boolean containsKey(@Nonnull String key) {
		checkNotNull(key);
		key = getPrefixed(key);
		return rawMap.containsKey(key) || (defaults != null && defaults.containsKey(key));
	}

	@Override
	@Nullable
	public final String getString(@Nonnull String key) {
		checkNotNull(key);
		key = getPrefixed(key);
		String value = rawMap.get(key);
		return value != null ? value : defaults != null ? defaults.getString(key) : null;
	}

	/**
	 * Reads and parses the config file at the given path, line by line. Paths currently being
	 * included are tracked on a stack to detect and reject recursive includes.
	 *
	 * @throws IOException if the include recurses, the path cannot be found (unless
	 *         {@link #allowFileNotFound(boolean)} is set), or reading/parsing fails
	 */
	@Override
	@Nonnull
	public final BaseConfigBuilder include(@Nonnull String path) throws IOException {
		if (pathDeque.contains(checkNotNull(path))) {
			throw new IOException("include recursion detected: " + path);
		}
		InputStream in = getInputStream(path);
		if (in != null) {
			pathDeque.push(path);
			watchers.forEach(watcher -> watcher.onEnter(path));
			try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
				String line;
				while ((line = reader.readLine()) != null) {
					parseLine(line);
				}
			} finally {
				// BUGFIX: always unwind, even if reading or parsing throws. Previously the path
				// stayed on the deque after a failure, so a later include of the same path was
				// misreported as recursion, and watchers never saw the matching onExit.
				pathDeque.pop();
				watchers.forEach(watcher -> watcher.onExit(path));
			}
		}
		return this;
	}

	/**
	 * Feeds a single line to the parser chain; the first parser that handles it short-circuits
	 * the rest.
	 */
	@Nonnull
	public final BaseConfigBuilder parseLine(@Nonnull String line) throws IOException {
		for (ConfigParser parser : parsers) {
			if (parser.parseLine(line, this)) {
				break;
			}
		}
		return this;
	}

	/**
	 * @return a stream for the first loader able to resolve the path, or null when not found and
	 *         {@link #allowFileNotFound(boolean)} is set
	 * @throws FileNotFoundException when no loader resolves the path and file-not-found is not allowed
	 */
	@Nullable
	private InputStream getInputStream(@Nonnull String path) throws IOException {
		for (ConfigLoader loader : loaders) {
			InputStream in = loader.getInputStream(path);
			if (in != null) {
				return in;
			}
		}
		if (allowFileNotFound) {
			return null;
		} else {
			throw new FileNotFoundException(path);
		}
	}
}
| |
/*
* Copyright (c) 1998, 2009, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.corba.se.impl.resolver;
import java.util.List ;
import java.util.Map ;
import java.util.Comparator ;
import java.util.Iterator ;
import java.util.HashMap ;
import java.util.ArrayList ;
import java.util.Collections ;
import org.omg.CosNaming.NamingContextExt ;
import org.omg.CosNaming.NamingContextExtHelper ;
import com.sun.corba.se.spi.ior.IOR;
import com.sun.corba.se.spi.ior.IORTemplate;
import com.sun.corba.se.spi.ior.ObjectKey;
import com.sun.corba.se.spi.ior.IORFactories;
import com.sun.corba.se.spi.ior.ObjectKeyFactory ;
import com.sun.corba.se.spi.ior.iiop.IIOPAddress;
import com.sun.corba.se.spi.ior.iiop.IIOPProfile ;
import com.sun.corba.se.spi.ior.iiop.IIOPProfileTemplate ;
import com.sun.corba.se.spi.ior.iiop.IIOPFactories ;
import com.sun.corba.se.spi.ior.iiop.GIOPVersion;
import com.sun.corba.se.spi.ior.iiop.AlternateIIOPAddressComponent;
import com.sun.corba.se.spi.logging.CORBALogDomains ;
import com.sun.corba.se.spi.orb.Operation;
import com.sun.corba.se.spi.orb.ORB;
import com.sun.corba.se.spi.resolver.Resolver;
import com.sun.corba.se.impl.encoding.EncapsInputStream;
import com.sun.corba.se.impl.logging.ORBUtilSystemException;
import com.sun.corba.se.impl.logging.OMGSystemException;
import com.sun.corba.se.impl.naming.namingutil.INSURLHandler;
import com.sun.corba.se.impl.naming.namingutil.IIOPEndpointInfo;
import com.sun.corba.se.impl.naming.namingutil.INSURL;
import com.sun.corba.se.impl.naming.namingutil.CorbalocURL;
import com.sun.corba.se.impl.naming.namingutil.CorbanameURL;
import com.sun.corba.se.impl.orbutil.ORBConstants;
import com.sun.corba.se.impl.orbutil.ORBUtility;
/**
* This class provides an Operation that converts from CORBA INS URL strings into
* CORBA object references. It will eventually become extensible, but for now it
* simply encapsulates the existing implementation. Once the full extensibility
* is in place, we want this operation to convert string to INSURL, which has mainly
* a public resolver method that returns an object reference.
*
* @author Hemanth
* @author Ken
*/
public class INSURLOperationImpl implements Operation
{
ORB orb;
ORBUtilSystemException wrapper ;
OMGSystemException omgWrapper ;
Resolver bootstrapResolver ;
// Root Naming Context for default resolution of names.
private NamingContextExt rootNamingContextExt;
private Object rootContextCacheLock = new Object() ;
// The URLHandler to parse INS URL's
private INSURLHandler insURLHandler = INSURLHandler.getINSURLHandler() ;
public INSURLOperationImpl( ORB orb, Resolver bootstrapResolver )
{
this.orb = orb ;
wrapper = ORBUtilSystemException.get( orb,
CORBALogDomains.ORB_RESOLVER ) ;
omgWrapper = OMGSystemException.get( orb,
CORBALogDomains.ORB_RESOLVER ) ;
this.bootstrapResolver = bootstrapResolver ;
}
private static final int NIBBLES_PER_BYTE = 2 ;
private static final int UN_SHIFT = 4 ; // "UPPER NIBBLE" shift factor for <<
/** This static method takes a Stringified IOR and converts it into IOR object.
* It is the caller's responsibility to only pass strings that start with "IOR:".
*/
private org.omg.CORBA.Object getIORFromString( String str )
{
// Length must be even for str to be valid
if ( (str.length() & 1) == 1 )
throw wrapper.badStringifiedIorLen() ;
byte[] buf = new byte[(str.length() - ORBConstants.STRINGIFY_PREFIX.length()) / NIBBLES_PER_BYTE];
for (int i=ORBConstants.STRINGIFY_PREFIX.length(), j=0; i < str.length(); i +=NIBBLES_PER_BYTE, j++) {
buf[j] = (byte)((ORBUtility.hexOf(str.charAt(i)) << UN_SHIFT) & 0xF0);
buf[j] |= (byte)(ORBUtility.hexOf(str.charAt(i+1)) & 0x0F);
}
EncapsInputStream s = new EncapsInputStream(orb, buf, buf.length,
orb.getORBData().getGIOPVersion());
s.consumeEndian();
return s.read_Object() ;
}
public Object operate( Object arg )
{
if (arg instanceof String) {
String str = (String)arg ;
if (str.startsWith( ORBConstants.STRINGIFY_PREFIX ))
// XXX handle this as just another URL scheme
return getIORFromString( str ) ;
else {
INSURL insURL = insURLHandler.parseURL( str ) ;
if (insURL == null)
throw omgWrapper.soBadSchemeName() ;
return resolveINSURL( insURL ) ;
}
}
throw wrapper.stringExpected() ;
}
private org.omg.CORBA.Object resolveINSURL( INSURL theURLObject ) {
// XXX resolve should be a method on INSURL
if( theURLObject.isCorbanameURL() ) {
return resolveCorbaname( (CorbanameURL)theURLObject );
} else {
return resolveCorbaloc( (CorbalocURL)theURLObject );
}
}
/**
* resolves a corbaloc: url that is encapsulated in a CorbalocURL object.
*
* @return the CORBA.Object if resolution is successful
*/
private org.omg.CORBA.Object resolveCorbaloc(
CorbalocURL theCorbaLocObject )
{
org.omg.CORBA.Object result = null;
// If RIR flag is true use the Bootstrap protocol
if( theCorbaLocObject.getRIRFlag( ) ) {
result = bootstrapResolver.resolve(theCorbaLocObject.getKeyString());
} else {
result = getIORUsingCorbaloc( theCorbaLocObject );
}
return result;
}
/**
* resolves a corbaname: url that is encapsulated in a CorbanameURL object.
*
* @return the CORBA.Object if resolution is successful
*/
private org.omg.CORBA.Object resolveCorbaname( CorbanameURL theCorbaName ) {
org.omg.CORBA.Object result = null;
try {
NamingContextExt theNamingContext = null;
if( theCorbaName.getRIRFlag( ) ) {
// Case 1 of corbaname: rir#
theNamingContext = getDefaultRootNamingContext( );
} else {
// Case 2 of corbaname: ::hostname#
org.omg.CORBA.Object corbalocResult =
getIORUsingCorbaloc( theCorbaName );
if( corbalocResult == null ) {
return null;
}
theNamingContext =
NamingContextExtHelper.narrow( corbalocResult );
}
String StringifiedName = theCorbaName.getStringifiedName( );
if( StringifiedName == null ) {
// This means return the Root Naming context
return theNamingContext;
} else {
return theNamingContext.resolve_str( StringifiedName );
}
} catch( Exception e ) {
clearRootNamingContextCache( );
return null;
}
}
/**
* This is an internal method to get the IOR from the CorbalocURL object.
*
* @return the CORBA.Object if resolution is successful
*/
private org.omg.CORBA.Object getIORUsingCorbaloc( INSURL corbalocObject )
{
Map profileMap = new HashMap();
List profileList1_0 = new ArrayList();
// corbalocObject cannot be null, because it's validated during
// parsing. So no null check is required.
java.util.List theEndpointInfo = corbalocObject.getEndpointInfo();
String theKeyString = corbalocObject.getKeyString();
// If there is no KeyString then it's invalid
if( theKeyString == null ) {
return null;
}
ObjectKey key = orb.getObjectKeyFactory().create(
theKeyString.getBytes() );
IORTemplate iortemp = IORFactories.makeIORTemplate( key.getTemplate() );
java.util.Iterator iterator = theEndpointInfo.iterator( );
while( iterator.hasNext( ) ) {
IIOPEndpointInfo element =
(IIOPEndpointInfo) iterator.next( );
IIOPAddress addr = IIOPFactories.makeIIOPAddress( orb, element.getHost(),
element.getPort() );
GIOPVersion giopVersion = GIOPVersion.getInstance( (byte)element.getMajor(),
(byte)element.getMinor());
IIOPProfileTemplate profileTemplate = null;
if (giopVersion.equals(GIOPVersion.V1_0)) {
profileTemplate = IIOPFactories.makeIIOPProfileTemplate(
orb, giopVersion, addr);
profileList1_0.add(profileTemplate);
} else {
if (profileMap.get(giopVersion) == null) {
profileTemplate = IIOPFactories.makeIIOPProfileTemplate(
orb, giopVersion, addr);
profileMap.put(giopVersion, profileTemplate);
} else {
profileTemplate = (IIOPProfileTemplate)profileMap.get(giopVersion);
AlternateIIOPAddressComponent iiopAddressComponent =
IIOPFactories.makeAlternateIIOPAddressComponent(addr);
profileTemplate.add(iiopAddressComponent);
}
}
}
GIOPVersion giopVersion = orb.getORBData().getGIOPVersion();
IIOPProfileTemplate pTemplate = (IIOPProfileTemplate)profileMap.get(giopVersion);
if (pTemplate != null) {
iortemp.add(pTemplate); // Add profile for GIOP version used by this ORB
profileMap.remove(giopVersion); // Now remove this value from the map
}
// Create a comparator that can sort in decending order (1.2, 1.1, ...)
Comparator comp = new Comparator() {
public int compare(Object o1, Object o2) {
GIOPVersion gv1 = (GIOPVersion)o1;
GIOPVersion gv2 = (GIOPVersion)o2;
return (gv1.lessThan(gv2) ? 1 : (gv1.equals(gv2) ? 0 : -1));
};
};
// Now sort using the above comparator
List list = new ArrayList(profileMap.keySet());
Collections.sort(list, comp);
// Add the profiles in the sorted order
Iterator iter = list.iterator();
while (iter.hasNext()) {
IIOPProfileTemplate pt = (IIOPProfileTemplate)profileMap.get(iter.next());
iortemp.add(pt);
}
// Finally add the 1.0 profiles
iortemp.addAll(profileList1_0);
IOR ior = iortemp.makeIOR( orb, "", key.getId() ) ;
return ORBUtility.makeObjectReference( ior ) ;
}
/**
* This is required for corbaname: resolution. Currently we
* are not caching RootNamingContext as the reference to rootNamingContext
* may not be Persistent in all the implementations.
* _REVISIT_ to clear the rootNamingContext in case of COMM_FAILURE.
*
* @return the org.omg.COSNaming.NamingContextExt if resolution is
* successful
*
*/
private NamingContextExt getDefaultRootNamingContext( ) {
synchronized( rootContextCacheLock ) {
if( rootNamingContextExt == null ) {
try {
rootNamingContextExt =
NamingContextExtHelper.narrow(
orb.getLocalResolver().resolve( "NameService" ) );
} catch( Exception e ) {
rootNamingContextExt = null;
}
}
}
return rootNamingContextExt;
}
/**
* A utility method to clear the RootNamingContext, if there is an
* exception in resolving CosNaming:Name from the RootNamingContext,
*/
    private void clearRootNamingContextCache( ) {
        // Drop the cached root naming context so the next lookup re-resolves
        // it from scratch (invoked after a failed CosNaming resolution).
        synchronized( rootContextCacheLock ) {
            rootNamingContextExt = null;
        }
    }
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.mod.event;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableMap;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.terraingen.BiomeEvent;
import net.minecraftforge.event.terraingen.DecorateBiomeEvent;
import net.minecraftforge.event.terraingen.InitMapGenEvent;
import net.minecraftforge.event.terraingen.InitNoiseGensEvent;
import net.minecraftforge.event.terraingen.OreGenEvent;
import net.minecraftforge.event.terraingen.PopulateChunkEvent;
import net.minecraftforge.event.terraingen.SaplingGrowTreeEvent;
import net.minecraftforge.event.terraingen.WorldTypeEvent;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.common.eventhandler.EventBus;
import net.minecraftforge.fml.common.eventhandler.EventPriority;
import net.minecraftforge.fml.common.eventhandler.IEventListener;
import net.minecraftforge.fml.common.gameevent.PlayerEvent;
import org.spongepowered.api.event.Cancellable;
import org.spongepowered.api.event.Event;
import org.spongepowered.api.event.Order;
import org.spongepowered.api.event.block.BreakBlockEvent;
import org.spongepowered.api.event.block.HarvestBlockEvent;
import org.spongepowered.api.event.block.InteractBlockEvent;
import org.spongepowered.api.event.block.NotifyNeighborBlockEvent;
import org.spongepowered.api.event.block.PlaceBlockEvent;
import org.spongepowered.api.event.command.MessageSinkEvent;
import org.spongepowered.api.event.entity.ConstructEntityEvent;
import org.spongepowered.api.event.entity.DestructEntityEvent;
import org.spongepowered.api.event.entity.SpawnEntityEvent;
import org.spongepowered.api.event.entity.TargetEntityEvent;
import org.spongepowered.api.event.inventory.UseItemStackEvent;
import org.spongepowered.api.event.network.ClientConnectionEvent;
import org.spongepowered.api.event.world.LoadWorldEvent;
import org.spongepowered.api.event.world.TargetWorldEvent;
import org.spongepowered.api.event.world.UnloadWorldEvent;
import org.spongepowered.api.event.world.chunk.LoadChunkEvent;
import org.spongepowered.api.event.world.chunk.TargetChunkEvent;
import org.spongepowered.api.event.world.chunk.UnloadChunkEvent;
import org.spongepowered.api.plugin.PluginManager;
import org.spongepowered.common.Sponge;
import org.spongepowered.common.event.RegisteredListener;
import org.spongepowered.common.event.SpongeEventManager;
import org.spongepowered.mod.SpongeMod;
import org.spongepowered.mod.interfaces.IMixinEvent;
import org.spongepowered.mod.interfaces.IMixinEventBus;
import java.util.List;
import javax.inject.Inject;
/**
 * Bridges the Sponge event system onto the Forge event buses.
 *
 * <p>Sponge events that have a Forge equivalent (per {@code eventMappings}) are
 * translated and fired on the appropriate Forge bus (per {@code busMappings}),
 * while Forge events are exposed to Sponge plugin listeners with plugin/mod
 * event data synchronized around each dispatch phase.
 */
public class SpongeModEventManager extends SpongeEventManager {

    /** Forge listener priority &lt;-&gt; Sponge listener order translation table. */
    @SuppressWarnings("unused") private final ImmutableBiMap<EventPriority, Order> priorityMappings =
            new ImmutableBiMap.Builder<EventPriority, Order>()
                    .put(EventPriority.HIGHEST, Order.FIRST)
                    .put(EventPriority.HIGH, Order.EARLY)
                    .put(EventPriority.NORMAL, Order.DEFAULT)
                    .put(EventPriority.LOW, Order.LATE)
                    .put(EventPriority.LOWEST, Order.LAST)
                    .build();

    /** Sponge event interface -> equivalent Forge event class, used to translate outgoing posts. */
    private final ImmutableMap<Class<? extends Event>, Class<? extends net.minecraftforge.fml.common.eventhandler.Event>> eventMappings =
            new ImmutableMap.Builder<Class<? extends Event>, Class<? extends net.minecraftforge.fml.common.eventhandler.Event>>()
                    .put(NotifyNeighborBlockEvent.class, net.minecraftforge.event.world.BlockEvent.NeighborNotifyEvent.class)
                    .put(TargetChunkEvent.class, net.minecraftforge.event.world.ChunkEvent.class)
                    .put(LoadChunkEvent.class, net.minecraftforge.event.world.ChunkEvent.Load.class)
                    .put(UnloadChunkEvent.class, net.minecraftforge.event.world.ChunkEvent.Unload.class)
                    .put(ConstructEntityEvent.Post.class, net.minecraftforge.event.entity.EntityEvent.EntityConstructing.class)
                    .put(TargetEntityEvent.class, net.minecraftforge.event.entity.EntityEvent.class)
                    .put(SpawnEntityEvent.class, net.minecraftforge.event.entity.EntityJoinWorldEvent.class)
                    .put(DestructEntityEvent.Death.class, net.minecraftforge.event.entity.living.LivingDeathEvent.class)
                    .put(BreakBlockEvent.class, net.minecraftforge.event.world.BlockEvent.BreakEvent.class)
                    .put(MessageSinkEvent.class, net.minecraftforge.event.ServerChatEvent.class)
                    .put(HarvestBlockEvent.class, net.minecraftforge.event.world.BlockEvent.HarvestDropsEvent.class)
                    .put(InteractBlockEvent.class, net.minecraftforge.event.entity.player.PlayerInteractEvent.class)
                    .put(PlaceBlockEvent.class, net.minecraftforge.event.world.BlockEvent.PlaceEvent.class)
                    .put(TargetWorldEvent.class, net.minecraftforge.event.world.WorldEvent.class)
                    .put(LoadWorldEvent.class, net.minecraftforge.event.world.WorldEvent.Load.class)
                    .put(UnloadWorldEvent.class, net.minecraftforge.event.world.WorldEvent.Unload.class)
                    .put(UseItemStackEvent.Start.class, net.minecraftforge.event.entity.player.PlayerUseItemEvent.Start.class)
                    .put(UseItemStackEvent.Tick.class, net.minecraftforge.event.entity.player.PlayerUseItemEvent.Tick.class)
                    .put(UseItemStackEvent.Stop.class, net.minecraftforge.event.entity.player.PlayerUseItemEvent.Stop.class)
                    .put(UseItemStackEvent.Finish.class, net.minecraftforge.event.entity.player.PlayerUseItemEvent.Finish.class)
                    .put(ClientConnectionEvent.Join.class, PlayerEvent.PlayerLoggedInEvent.class)
                    .put(ClientConnectionEvent.Disconnect.class, PlayerEvent.PlayerLoggedOutEvent.class)
                    .build();

    /**
     * Forge event class (or its enclosing class, for inner-class events) -> the
     * bus it must be fired on. Anything not listed falls back to
     * {@link MinecraftForge#EVENT_BUS}.
     */
    private final ImmutableMap<Class<? extends net.minecraftforge.fml.common.eventhandler.Event>, EventBus> busMappings =
            new ImmutableMap.Builder<Class<? extends net.minecraftforge.fml.common.eventhandler.Event>, EventBus>()
                    .put(OreGenEvent.class, MinecraftForge.ORE_GEN_BUS)
                    .put(WorldTypeEvent.class, MinecraftForge.TERRAIN_GEN_BUS)
                    .put(BiomeEvent.class, MinecraftForge.TERRAIN_GEN_BUS)
                    .put(DecorateBiomeEvent.class, MinecraftForge.TERRAIN_GEN_BUS)
                    .put(InitMapGenEvent.class, MinecraftForge.TERRAIN_GEN_BUS)
                    .put(InitNoiseGensEvent.class, MinecraftForge.TERRAIN_GEN_BUS)
                    .put(PopulateChunkEvent.class, MinecraftForge.TERRAIN_GEN_BUS)
                    .put(SaplingGrowTreeEvent.class, MinecraftForge.TERRAIN_GEN_BUS)
                    .put(net.minecraftforge.fml.common.gameevent.PlayerEvent.class, FMLCommonHandler.instance().bus())
                    .build();

    @Inject
    public SpongeModEventManager(PluginManager pluginManager) {
        super(pluginManager);
    }

    /**
     * Dispatches an already-constructed Forge event to both Sponge plugin
     * listeners and the given Forge listeners, synchronizing data between the
     * Forge event and its Sponge counterpart around each phase.
     *
     * @param forgeEvent the Forge event being fired (must not be null)
     * @param listeners the Forge listeners registered for this event/bus
     * @return true if the event is cancelable and ended up canceled
     */
    public boolean post(net.minecraftforge.fml.common.eventhandler.Event forgeEvent, IEventListener[] listeners) {
        checkNotNull(forgeEvent, "forgeEvent");

        Event spongeEvent = ((IMixinEvent) forgeEvent).createSpongeEvent();
        RegisteredListener.Cache listenerCache = getHandlerCache(spongeEvent);

        // Phase 1: plugin listeners that asked to run before mod modifications.
        for (Order order : Order.values()) {
            postBeforeModifications(spongeEvent, listenerCache.getListenersByOrder(order));
        }
        // Push plugin-side changes into the Forge event before mods see it.
        ((IMixinEvent) forgeEvent).syncDataToForge(spongeEvent);

        // Phase 2: the Forge/mod listeners themselves; a throwing listener is
        // logged and must not abort the remaining listeners.
        for (IEventListener listener : listeners) {
            try {
                listener.invoke(forgeEvent);
            } catch (Throwable throwable) {
                SpongeMod.instance.getLogger().catching(throwable);
            }
        }

        // Pull mod-side changes back into the Sponge event for plugins.
        ((IMixinEvent) spongeEvent).syncDataToSponge(forgeEvent);

        // Phase 3: the remaining (default) plugin listeners.
        for (Order order : Order.values()) {
            post(spongeEvent, listenerCache.getListenersByOrder(order));
        }
        // Push the final plugin-side state back into the Forge event.
        ((IMixinEvent) forgeEvent).syncDataToForge(spongeEvent);

        // Propagate a plugin-side cancellation to the Forge event.
        if (spongeEvent instanceof Cancellable && ((Cancellable) spongeEvent).isCancelled()) {
            forgeEvent.setCanceled(true);
        }
        return forgeEvent.isCancelable() && forgeEvent.isCanceled();
    }

    /**
     * Fires the event to the listeners in this batch that requested
     * before-modification delivery; errors are logged per listener.
     *
     * @return true if the event is cancellable and currently cancelled
     */
    @SuppressWarnings("unchecked")
    protected static boolean postBeforeModifications(Event event, List<RegisteredListener<?>> listeners) {
        for (@SuppressWarnings("rawtypes")
        RegisteredListener listener : listeners) {
            try {
                if (listener.isBeforeModifications()) {
                    listener.handle(event);
                }
            } catch (Throwable e) {
                Sponge.getLogger().error("Could not pass {} to {}", event.getClass().getSimpleName(), listener.getPlugin(), e);
            }
        }
        return event instanceof Cancellable && ((Cancellable) event).isCancelled();
    }

    /**
     * Fires the event to the listeners in this batch that did NOT request
     * before-modification delivery; errors are logged per listener.
     *
     * @return true if the event is cancellable and currently cancelled
     */
    @SuppressWarnings("unchecked")
    protected static boolean post(Event event, List<RegisteredListener<?>> listeners) {
        for (@SuppressWarnings("rawtypes")
        RegisteredListener listener : listeners) {
            try {
                if (!listener.isBeforeModifications()) {
                    listener.handle(event);
                }
            } catch (Throwable e) {
                Sponge.getLogger().error("Could not pass {} to {}", event.getClass().getSimpleName(), listener.getPlugin(), e);
            }
        }
        return event instanceof Cancellable && ((Cancellable) event).isCancelled();
    }

    /**
     * Entry point for Sponge-side posts: if the event has a Forge equivalent,
     * translate it and fire on the mapped Forge bus; otherwise fall back to the
     * plain Sponge dispatch in the superclass. Returns false on the client
     * thread without dispatching.
     */
    @Override
    public boolean post(Event event) {
        if (SpongeMod.instance.isClientThread()) {
            return false;
        }
        // The mapping is keyed by the event's primary interface. Guard against
        // implementation classes that declare no interfaces at all — the
        // original unguarded getInterfaces()[0] would throw
        // ArrayIndexOutOfBoundsException for such a class.
        Class<?>[] interfaces = event.getClass().getInterfaces();
        Class<? extends net.minecraftforge.fml.common.eventhandler.Event> clazz =
                interfaces.length > 0 ? this.eventMappings.get(interfaces[0]) : null;
        if (clazz != null) {
            net.minecraftforge.fml.common.eventhandler.Event forgeEvent = SpongeForgeEventFactory.findAndCreateForgeEvent(event, clazz);
            if (forgeEvent != null) {
                // Avoid separate mappings for events defined as inner classes.
                Class<?> enclosingClass = forgeEvent.getClass().getEnclosingClass();
                EventBus bus = this.busMappings.get(enclosingClass == null ? forgeEvent.getClass() : enclosingClass);
                if (bus == null) {
                    bus = MinecraftForge.EVENT_BUS;
                }
                return post(forgeEvent, forgeEvent.getListenerList().getListeners(((IMixinEventBus) bus).getBusID()));
            }
        }
        return super.post(event);
    }
}
| |
/* ************************************************************************
#
# DivConq
#
# http://divconq.com/
#
# Copyright:
# Copyright 2014 eTimeline, LLC. All rights reserved.
#
# License:
# See the license.txt file in the project's top-level directory for details.
#
# Authors:
# * Andy White
#
************************************************************************ */
package divconq.test.bus;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Scanner;
import divconq.api.ApiSession;
import divconq.api.DumpCallback;
import divconq.api.tasks.TaskFactory;
import divconq.bus.Message;
import divconq.bus.net.SocketInfo;
import divconq.filestore.CommonPath;
import divconq.hub.Foreground;
import divconq.hub.Hub;
import divconq.hub.ILocalCommandLine;
import divconq.lang.op.OperationContext;
import divconq.lang.op.OperationObserver;
import divconq.util.StringUtil;
import divconq.work.Task;
/**
* ONLY works with local session, does not work with remote sessions
*/
/**
 * Interactive command-line menu for exercising the dc bus from a local
 * session: add/remove listeners and connectors, dump bus status, send echo
 * and tickle messages, and initiate file transfers.
 *
 * ONLY works with local session, does not work with remote sessions.
 */
public class Backend implements ILocalCommandLine {
    @Override
    public void run(final Scanner scan, final ApiSession api) {
        // Listeners are keyed by port, connectors by hub id, so they can be
        // removed again from the same menu.
        HashMap<String, SocketInfo> listeners = new HashMap<>();
        HashMap<String, SocketInfo> connectors = new HashMap<>();

        boolean running = true;

        while (running) {
            try {
                System.out.println();
                System.out.println("-----------------------------------------------");
                System.out.println("   dcBackend Utilities");
                System.out.println("-----------------------------------------------");
                System.out.println("0)  Exit");
                System.out.println("1)  Add Listener");
                System.out.println("2)  Remove Listener");
                System.out.println("3)  Bus Status");
                System.out.println("4)  Tickle Hub");
                System.out.println("5)  Echo Hub");
                System.out.println("6)  Add Connector");
                System.out.println("7)  Remove Connector");
                System.out.println("8)  Encrypt/Util");
                System.out.println("9)  Initiate Upload");
                System.out.println("10) Initiate Download");
                // Bug fix: option 11 was implemented below but missing from
                // the printed menu, so users could not discover it.
                System.out.println("11) Echo Tests");
                System.out.println("12) Verify Self");

                String opt = scan.nextLine();

                Long mopt = StringUtil.parseInt(opt);

                if (mopt == null)
                    continue;

                switch (mopt.intValue()) {
                case 0:
                    running = false;
                    break;

                case 1: {
                    // Start listening for bus connections on a loopback port.
                    System.out.println("Add port: ");
                    String port = scan.nextLine();

                    if (listeners.containsKey(port))
                        System.out.println("already listening");
                    else {
                        SocketInfo info = SocketInfo.buildLoopback((int) StringUtil.parseInt(port, 0), true);

                        if (info.getPort() == 0)
                            System.out.println("bad port");
                        else {
                            Hub.instance.getBus().addListener(info);
                            listeners.put(port, info);
                            System.out.println("listener added");
                        }
                    }

                    break;
                }

                case 2: {
                    // Stop a listener previously added through option 1.
                    System.out.println("Remove port: ");
                    String port = scan.nextLine();

                    if (!listeners.containsKey(port))
                        System.out.println("not listening to that port");
                    else {
                        SocketInfo info = listeners.remove(port);

                        if (info == null)
                            System.out.println("bad port");
                        else {
                            Hub.instance.getBus().removeListener(info);
                            System.out.println("listener removed");
                        }
                    }

                    break;
                }

                case 3: {
                    // Print the current bus connection state to stdout.
                    Hub.instance.getBus().dumpInfo();
                    break;
                }

                case 4: {
                    // Fire-and-forget Status/Tickle message to a given hub.
                    System.out.println("To Hub: ");
                    String to = scan.nextLine();

                    Message msg = new Message("Status", "Tickle", "Test");
                    msg.withToHub(to);

                    api.sendForgetMessage(msg);
                    break;
                }

                case 5: {
                    // Send an echo request and dump the reply when it arrives.
                    System.out.println("To Hub: ");
                    String to = scan.nextLine();

                    System.out.println("Message: ");
                    String data = scan.nextLine();

                    Message msg = new Message("Status", "Echo", "Test", data + " - av73Dw??gT80Hgt");
                    msg.withToHub(to);

                    api.sendMessage(msg, new DumpCallback("Echo"));
                    break;
                }

                case 6: {
                    // Open an outgoing bus connection to another hub.
                    System.out.println("Add hubid: ");
                    String hubid = scan.nextLine();

                    System.out.println("Add port: ");
                    String port = scan.nextLine();

                    if (connectors.containsKey(hubid))
                        System.out.println("already connecting");
                    else {
                        SocketInfo info = SocketInfo.buildLoopback((int) StringUtil.parseInt(port, 0), true);
                        info.setHubId(hubid);

                        if ((info.getPort() == 0) || StringUtil.isEmpty(hubid))
                            System.out.println("bad connector");
                        else {
                            Hub.instance.getBus().addConnector(info);
                            connectors.put(hubid, info);
                            System.out.println("connector added");
                        }
                    }

                    break;
                }

                case 7: {
                    // Close a connector previously added through option 6.
                    System.out.println("Remove hubid: ");
                    String hubid = scan.nextLine();

                    if (!connectors.containsKey(hubid))
                        System.out.println("not connecting to that port");
                    else {
                        SocketInfo info = connectors.remove(hubid);

                        if (info == null)
                            System.out.println("bad connector");
                        else {
                            Hub.instance.getBus().removeConnector(info);
                            System.out.println("connector removed");
                        }
                    }

                    break;
                }

                case 8: {
                    System.out.println();
                    Foreground.utilityMenu(scan);
                    break;
                }

                case 9: {
                    System.out.println("File Name: ");
                    // Bug fix: a leftover hard-coded debug path
                    // (D:\dev\divconq\hub\lib\guava-14.0-rc1.jar) ignored this
                    // prompt entirely; read the operator's input as intended.
                    final String fname = scan.nextLine();

                    System.out.println("Save Path ([enter] for root): ");
                    final String spath = scan.nextLine();

                    final Path src = Paths.get(fname);

                    CommonPath dest = new CommonPath(spath + "/" + src.getFileName());

                    // TODO name
                    Task uploadtask = TaskFactory.createUploadTask(api, "x", src, dest, null, true);

                    Hub.instance.getWorkPool().submit(uploadtask, new OperationObserver() {
                        @Override
                        public void completed(OperationContext or) {
                            if (or.hasErrors())
                                System.out.println("Upload failed!");
                            else
                                System.out.println("Upload worked!");
                        }
                    });

                    break;
                }

                case 10: {
                    /* TODO
                    System.out.println("File Name: ");
                    final String spath = scan.nextLine();
                    final CommonPath src = new CommonPath(spath);
                    System.out.println("Save Path: ");
                    final Path dest = Paths.get(scan.nextLine(), src.getFileName());
                    DataStreamApi dsapi = new DataStreamApi(api);
                    dsapi.simpleDownloadAndVerify(src, dest, new OperationCallback() {
                        // initializer is optional, it supports the progress bar, etc
                        {
                            this.addObserver(new Observer() {
                                @Override
                                public void update(Observable or, Object area) {
                                    // TODO output progress
                                }
                            });
                        }
                        @Override
                        public void callback() {
                            if (this.hasErrors()) {
                                System.out.println("Download Error: " + this.getMessage());
                                return;
                            }
                            System.out.println("Download complete!");
                        }
                    });
                    */
                    break;
                }

                case 11: {
                    // Four synchronous echo round-trips to sanity-check the bus.
                    Message msg = new Message("Status", "Echo", "Test", "Test 1");
                    Message rmsg = api.sendMessage(msg);
                    System.out.println("Response 1: " + rmsg.getFieldAsString("Body"));

                    msg = new Message("Status", "Echo", "Test", "Test 2");
                    rmsg = api.sendMessage(msg);
                    System.out.println("Response 2: " + rmsg.getFieldAsString("Body"));

                    msg = new Message("Status", "Echo", "Test", "Test 3");
                    rmsg = api.sendMessage(msg);
                    System.out.println("Response 3: " + rmsg.getFieldAsString("Body"));

                    msg = new Message("Status", "Echo", "Test", "Test 4");
                    rmsg = api.sendMessage(msg);
                    System.out.println("Response 4: " + rmsg.getFieldAsString("Body"));

                    break;
                }

                case 12: {
                    // Ask this hub for its own status info and dump the reply.
                    Message msg = new Message("Status", "Info", "Test");
                    api.sendMessage(msg, new DumpCallback("Info"));
                    break;
                }
                }
            }
            catch (Exception x) {
                System.out.println("Cli Error: " + x);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.10.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.accumulo.proxy.thrift;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.10.0)")
public class CompactionStrategyConfig implements org.apache.thrift.TBase<CompactionStrategyConfig, CompactionStrategyConfig._Fields>, java.io.Serializable, Cloneable, Comparable<CompactionStrategyConfig> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("CompactionStrategyConfig");
private static final org.apache.thrift.protocol.TField CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("className", org.apache.thrift.protocol.TType.STRING, (short)1);
private static final org.apache.thrift.protocol.TField OPTIONS_FIELD_DESC = new org.apache.thrift.protocol.TField("options", org.apache.thrift.protocol.TType.MAP, (short)2);
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new CompactionStrategyConfigStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new CompactionStrategyConfigTupleSchemeFactory();
public java.lang.String className; // required
public java.util.Map<java.lang.String,java.lang.String> options; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    CLASS_NAME((short)1, "className"),
    OPTIONS((short)2, "options");

    // Reverse lookup table from field name to constant, built once at load time.
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // CLASS_NAME
          return CLASS_NAME;
        case 2: // OPTIONS
          return OPTIONS;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }

    // Thrift wire id and declared field name for this constant.
    private final short _thriftId;
    private final java.lang.String _fieldName;

    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }
// isset id assignments
  // Field metadata (name, requirement level, wire type) keyed by _Fields
  // constant; registered with the Thrift runtime so reflective code can
  // introspect this struct.
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.CLASS_NAME, new org.apache.thrift.meta_data.FieldMetaData("className", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.OPTIONS, new org.apache.thrift.meta_data.FieldMetaData("options", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(CompactionStrategyConfig.class, metaDataMap);
  }
  /** Default constructor; both fields start unset (null). */
  public CompactionStrategyConfig() {
  }
  /** Convenience constructor assigning both fields directly (no copies made). */
  public CompactionStrategyConfig(
    java.lang.String className,
    java.util.Map<java.lang.String,java.lang.String> options)
  {
    this();
    this.className = className;
    this.options = options;
  }
/**
* Performs a deep copy on <i>other</i>.
*/
public CompactionStrategyConfig(CompactionStrategyConfig other) {
if (other.isSetClassName()) {
this.className = other.className;
}
if (other.isSetOptions()) {
java.util.Map<java.lang.String,java.lang.String> __this__options = new java.util.HashMap<java.lang.String,java.lang.String>(other.options);
this.options = __this__options;
}
}
  /** Returns a deep copy of this struct (delegates to the copy constructor). */
  public CompactionStrategyConfig deepCopy() {
    return new CompactionStrategyConfig(this);
  }
  /** Resets both fields to their initial unset (null) state. */
  @Override
  public void clear() {
    this.className = null;
    this.options = null;
  }
  /** Returns the className field, or null if unset. */
  public java.lang.String getClassName() {
    return this.className;
  }
  /** Sets the className field; returns this for call chaining. */
  public CompactionStrategyConfig setClassName(java.lang.String className) {
    this.className = className;
    return this;
  }
  /** Clears the className field (unset == null for object fields). */
  public void unsetClassName() {
    this.className = null;
  }
  /** Returns true if field className is set (has been assigned a value) and false otherwise. */
  public boolean isSetClassName() {
    return this.className != null;
  }
  // Thrift "isset" hook: for an object-typed field, marking it unset just
  // nulls it out; marking it set is a no-op.
  public void setClassNameIsSet(boolean value) {
    if (!value) {
      this.className = null;
    }
  }
  /** Returns the number of entries in the options map, or 0 if the map is unset. */
  public int getOptionsSize() {
    return (this.options == null) ? 0 : this.options.size();
  }
public void putToOptions(java.lang.String key, java.lang.String val) {
if (this.options == null) {
this.options = new java.util.HashMap<java.lang.String,java.lang.String>();
}
this.options.put(key, val);
}
  /** Returns the options map itself (not a copy), or null if unset. */
  public java.util.Map<java.lang.String,java.lang.String> getOptions() {
    return this.options;
  }
  /** Sets the options field (stored by reference, no copy); returns this for chaining. */
  public CompactionStrategyConfig setOptions(java.util.Map<java.lang.String,java.lang.String> options) {
    this.options = options;
    return this;
  }
  /** Clears the options field (unset == null for object fields). */
  public void unsetOptions() {
    this.options = null;
  }
  /** Returns true if field options is set (has been assigned a value) and false otherwise. */
  public boolean isSetOptions() {
    return this.options != null;
  }
  // Thrift "isset" hook: marking options unset nulls it; marking it set is a no-op.
  public void setOptionsIsSet(boolean value) {
    if (!value) {
      this.options = null;
    }
  }
  /** Generic setter used by Thrift metadata-driven code; a null value unsets the field. */
  public void setFieldValue(_Fields field, java.lang.Object value) {
    switch (field) {
    case CLASS_NAME:
      if (value == null) {
        unsetClassName();
      } else {
        setClassName((java.lang.String)value);
      }
      break;

    case OPTIONS:
      if (value == null) {
        unsetOptions();
      } else {
        setOptions((java.util.Map<java.lang.String,java.lang.String>)value);
      }
      break;

    }
  }
  /** Generic getter used by Thrift metadata-driven code; throws for an unknown field. */
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case CLASS_NAME:
      return getClassName();

    case OPTIONS:
      return getOptions();

    }
    throw new java.lang.IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise. */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }

    switch (field) {
    case CLASS_NAME:
      return isSetClassName();
    case OPTIONS:
      return isSetOptions();
    }
    throw new java.lang.IllegalStateException();
  }
@Override
public boolean equals(java.lang.Object that) {
if (that == null)
return false;
if (that instanceof CompactionStrategyConfig)
return this.equals((CompactionStrategyConfig)that);
return false;
}
public boolean equals(CompactionStrategyConfig that) {
if (that == null)
return false;
if (this == that)
return true;
boolean this_present_className = true && this.isSetClassName();
boolean that_present_className = true && that.isSetClassName();
if (this_present_className || that_present_className) {
if (!(this_present_className && that_present_className))
return false;
if (!this.className.equals(that.className))
return false;
}
boolean this_present_options = true && this.isSetOptions();
boolean that_present_options = true && that.isSetOptions();
if (this_present_options || that_present_options) {
if (!(this_present_options && that_present_options))
return false;
if (!this.options.equals(that.options))
return false;
}
return true;
}
  @Override
  public int hashCode() {
    // Generated hash: fold each field in with the prime multiplier 8191 and
    // distinct set/unset markers (131071 / 524287). Constants must not change
    // or hashes will disagree with other generated code.
    int hashCode = 1;

    hashCode = hashCode * 8191 + ((isSetClassName()) ? 131071 : 524287);
    if (isSetClassName())
      hashCode = hashCode * 8191 + className.hashCode();

    hashCode = hashCode * 8191 + ((isSetOptions()) ? 131071 : 524287);
    if (isSetOptions())
      hashCode = hashCode * 8191 + options.hashCode();

    return hashCode;
  }
  @Override
  public int compareTo(CompactionStrategyConfig other) {
    // Compares by runtime class name first, then field-by-field in thrift id
    // order: each field's isset flag, then its value when set on both sides.
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = java.lang.Boolean.valueOf(isSetClassName()).compareTo(other.isSetClassName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetClassName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.className, other.className);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.valueOf(isSetOptions()).compareTo(other.isSetOptions());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetOptions()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.options, other.options);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  /** Resolves a Thrift field id to its _Fields constant, or null if unknown. */
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  /** Deserializes this struct from the protocol via the scheme selected for it. */
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }
  /** Serializes this struct to the protocol via the scheme selected for it. */
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("CompactionStrategyConfig(");
boolean first = true;
sb.append("className:");
if (this.className == null) {
sb.append("null");
} else {
sb.append(this.className);
}
first = false;
if (!first) sb.append(", ");
sb.append("options:");
if (this.options == null) {
sb.append("null");
} else {
sb.append(this.options);
}
first = false;
sb.append(")");
return sb.toString();
}
  /** Validates required fields and sub-structs; this struct has neither, so it is a no-op. */
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization support: serialize through the Thrift compact protocol,
  // wrapping any Thrift failure as an IOException.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  // Java deserialization support: read back through the Thrift compact
  // protocol, wrapping any Thrift failure as an IOException.
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  /** Factory producing the field-by-field (standard) serialization scheme. */
  private static class CompactionStrategyConfigStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public CompactionStrategyConfigStandardScheme getScheme() {
      return new CompactionStrategyConfigStandardScheme();
    }
  }
  /** Standard (tagged, self-describing) wire scheme for this struct. */
  private static class CompactionStrategyConfigStandardScheme extends org.apache.thrift.scheme.StandardScheme<CompactionStrategyConfig> {

    // Reads tagged fields until STOP, skipping any field whose id or wire
    // type is unexpected (forward compatibility with newer writers).
    public void read(org.apache.thrift.protocol.TProtocol iprot, CompactionStrategyConfig struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // CLASS_NAME
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.className = iprot.readString();
              struct.setClassNameIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // OPTIONS
            if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
              {
                // Presize the map at twice the declared entry count.
                org.apache.thrift.protocol.TMap _map154 = iprot.readMapBegin();
                struct.options = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map154.size);
                java.lang.String _key155;
                java.lang.String _val156;
                for (int _i157 = 0; _i157 < _map154.size; ++_i157)
                {
                  _key155 = iprot.readString();
                  _val156 = iprot.readString();
                  struct.options.put(_key155, _val156);
                }
                iprot.readMapEnd();
              }
              struct.setOptionsIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();

      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }

    // Writes each set (non-null) field with its id/type tag, then a stop marker.
    public void write(org.apache.thrift.protocol.TProtocol oprot, CompactionStrategyConfig struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.className != null) {
        oprot.writeFieldBegin(CLASS_NAME_FIELD_DESC);
        oprot.writeString(struct.className);
        oprot.writeFieldEnd();
      }
      if (struct.options != null) {
        oprot.writeFieldBegin(OPTIONS_FIELD_DESC);
        {
          oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.options.size()));
          for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter158 : struct.options.entrySet())
          {
            oprot.writeString(_iter158.getKey());
            oprot.writeString(_iter158.getValue());
          }
          oprot.writeMapEnd();
        }
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }
/** Factory registered in the schemes table; vends the compact tuple-protocol codec. */
private static class CompactionStrategyConfigTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public CompactionStrategyConfigTupleScheme getScheme() {
return new CompactionStrategyConfigTupleScheme();
}
}
/**
 * Compact tuple serializer/deserializer for CompactionStrategyConfig.
 * Wire layout: a presence bitset (one bit per optional field), then the
 * values of the set fields in field-id order, with no per-field headers.
 */
private static class CompactionStrategyConfigTupleScheme extends org.apache.thrift.scheme.TupleScheme<CompactionStrategyConfig> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, CompactionStrategyConfig struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
// Bit i records whether optional field i is present.
java.util.BitSet optionals = new java.util.BitSet();
if (struct.isSetClassName()) {
optionals.set(0);
}
if (struct.isSetOptions()) {
optionals.set(1);
}
oprot.writeBitSet(optionals, 2);
if (struct.isSetClassName()) {
oprot.writeString(struct.className);
}
if (struct.isSetOptions()) {
{
// Map is written as its size followed by alternating key/value strings.
oprot.writeI32(struct.options.size());
for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter159 : struct.options.entrySet())
{
oprot.writeString(_iter159.getKey());
oprot.writeString(_iter159.getValue());
}
}
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, CompactionStrategyConfig struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
// Read the presence bitset first, then only the fields it marks as set.
java.util.BitSet incoming = iprot.readBitSet(2);
if (incoming.get(0)) {
struct.className = iprot.readString();
struct.setClassNameIsSet(true);
}
if (incoming.get(1)) {
{
org.apache.thrift.protocol.TMap _map160 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
struct.options = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map160.size);
java.lang.String _key161;
java.lang.String _val162;
for (int _i163 = 0; _i163 < _map160.size; ++_i163)
{
_key161 = iprot.readString();
_val162 = iprot.readString();
struct.options.put(_key161, _val162);
}
}
struct.setOptionsIsSet(true);
}
}
}
/**
 * Selects the codec matching the protocol in use: the standard (field-tagged)
 * scheme normally, or the compact tuple scheme when the protocol requests it.
 */
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
// NOTE(review): private, empty, and no callers visible in this file -- presumably dead code; confirm and remove.
private static void unusedMethod() {}
}
| |
// ----------------------------------------------------------------------------
// ----------------------------------------------------------------------------
package DSC.android.services.DSCcoreserviceproxy;
import java.util.ArrayList;
import java.util.BitSet;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.Messenger;
import android.os.RemoteException;
import android.os.SystemClock;
import android.util.Log;
/**
 * Client-side proxy for the DSC core Android service.
 *
 * <p>Binds to the remote service over {@link Messenger}-based IPC, marshals
 * get/set commands downstream as {@link TouchCommandParams} lists, and forwards
 * asynchronous replies to the registered {@link DSCCoreServiceListener}.
 *
 * <p>Not thread-safe; intended to be driven from a single (UI) thread.
 */
public class DSCCoreServiceProxy {

    private Context mContext;

    // Message "what" codes understood by the service; must stay in sync with
    // the service side (they mirror the CommandType codes below).
    static final int MSG_REGISTER_CLIENT = -1;
    static final int MSG_UNREGISTER_CLIENT = 0;
    static final int MSG_GET_FRAMERATE = 1;
    static final int MSG_SET_FRAMERATE = 2;
    static final int MSG_GET_RESOLUTION = 3;
    static final int MSG_SET_RESOLUTION = 4;
    static final int MSG_GET_SENSORMODE = 5;
    static final int MSG_SET_SENSORMODE = 6;
    static final int MSG_GET_DENOISING = 7;
    static final int MSG_SET_DENOISING = 8;
    static final int MSG_GET_GESTUREMAP = 9;
    static final int MSG_SET_GESTUREMAP = 10;
    static final int MSG_GET_DEPTHIMAGE = 11;
    static final int MSG_GET_CONFIDENCEIMAGE = 12;
    static final int MSG_GET_VERTEXIMAGE = 13;

    private static final String TAG = "DSCCoreServiceProxy";

    // NOTE(review): static, so the last setListener() call wins across ALL proxy
    // instances -- confirm single-instance usage before making this per-instance.
    static DSCCoreServiceListener mListener;

    /** Messenger for communicating with service. */
    Messenger mService = null;

    /** Flag indicating whether we have called bind on the service. */
    boolean mIsBound;

    /** elapsedRealtime() of the last depth-map request, used for latency logging. */
    long time1;

    /**
     * Handler of incoming messages from the service; dispatches results to
     * {@link #mListener}.
     */
    class IncomingHandler extends Handler {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
            case DSCCoreServiceProxy.MSG_GET_FRAMERATE:
            case DSCCoreServiceProxy.MSG_GET_RESOLUTION:
            case DSCCoreServiceProxy.MSG_GET_DENOISING:
            case DSCCoreServiceProxy.MSG_GET_GESTUREMAP:
            case DSCCoreServiceProxy.MSG_GET_SENSORMODE: {
                // Scalar GET replies carry the value in the "get_val" extra.
                Bundle data = msg.getData();
                int retval = data.getInt("get_val");
                Log.d(TAG, "Got a result for a GET call, value is " + retval);
                mListener.onResult(retval);
                break;
            }
            case DSCCoreServiceProxy.MSG_GET_DEPTHIMAGE:
            case DSCCoreServiceProxy.MSG_GET_VERTEXIMAGE:
            case DSCCoreServiceProxy.MSG_GET_CONFIDENCEIMAGE: {
                // Image replies carry the raw bytes; log round-trip latency
                // relative to the request timestamp recorded in getDepthMap().
                long time2 = SystemClock.elapsedRealtime();
                Bundle data = msg.getData();
                // NOTE(review): getByteArray may return null if the extra is
                // missing, which would NPE below -- confirm the service always
                // sets "depth_image_data" on these replies.
                byte[] depth_image = data.getByteArray("depth_image_data");
                mListener.onResult(depth_image);
                Log.d(TAG, "time taken to read 150K bytes over socket = "
                        + (time2 - time1) + "Time1 = " + time1 + "; Time2 = "
                        + time2 + "size of depthmap received is "
                        + depth_image.length);
                break;
            }
            default:
                super.handleMessage(msg);
            }
        }
    }

    /**
     * Target we publish for clients to send messages to IncomingHandler.
     */
    final Messenger mMessenger = new Messenger(new IncomingHandler());

    /** Registers the listener that receives asynchronous results. */
    public void setListener(DSCCoreServiceListener l) {
        mListener = l;
    }

    /**
     * Class for interacting with the main interfaces of the service.
     */
    private ServiceConnection mConnection = new ServiceConnection() {
        public void onServiceConnected(ComponentName className, IBinder service) {
            mService = new Messenger(service);
            try {
                Message msg = Message.obtain(null,
                        DSCCoreServiceProxy.MSG_REGISTER_CLIENT);
                msg.replyTo = mMessenger;
                mService.send(msg);
            } catch (RemoteException e) {
                // The service crashed before we could register; we can count on
                // soon being disconnected (and then reconnected if it can be
                // restarted), so there is no need to do anything here.
            }
            mIsBound = true;
            Log.d(TAG, "DSCCoreServiceProxy connected to Service");
        }

        public void onServiceDisconnected(ComponentName className) {
            // The connection was unexpectedly lost -- the service process crashed.
            mService = null;
            mIsBound = false;
            Log.d(TAG, "DSCCoreServiceProxy disconnected to Service");
        }
    };

    /** @return whether a bind has been established (see {@link #doBindService()}). */
    public boolean isBound() {
        return mIsBound;
    }

    /**
     * Establishes a connection with the service.  We use an explicit action
     * name because there is no reason to let other applications replace our
     * component.
     */
    public void doBindService() {
        Intent serviceintent = new Intent(
                "DSC.samples.DSCcoreandroidservice.DSCCoreAndroidService");
        mContext.bindService(serviceintent, mConnection,
                Context.BIND_AUTO_CREATE);
        Log.d(TAG, "Binding");
    }

    /** Unregisters from the service (if still reachable) and unbinds. */
    public void doUnbindService() {
        if (mIsBound) {
            // If we have received the service, and hence registered with it,
            // then now is the time to unregister.
            if (mService != null) {
                try {
                    Message msg = Message.obtain(null,
                            DSCCoreServiceProxy.MSG_UNREGISTER_CLIENT);
                    msg.replyTo = mMessenger;
                    mService.send(msg);
                } catch (RemoteException e) {
                    // There is nothing special we need to do if the service
                    // has crashed.
                }
            }
            // Detach our existing connection.
            mContext.unbindService(mConnection);
            mIsBound = false;
            Log.d(TAG, "Unbinding");
        }
    }

    public DSCCoreServiceProxy(Context ctx) {
        this.mContext = ctx;
    }

    /**
     * Marshals a command into a {@link Message}: element 1 of {@code tc} is the
     * message code, elements 2..n become int extras "param1".."param(n-1)".
     */
    private void sendCommandDownstream(TouchCommandParams tc) {
        Bundle b = new Bundle();
        for (int i = 2; i < tc.size(); i++) {
            // NOTE(review): this cast assumes every parameter is an Integer;
            // setGestureMap() adds a BitSet, which would throw CCE here --
            // confirm the intended wire format for gesture maps.
            b.putInt("param" + (i - 1), (Integer) tc.get(i));
        }
        Message msg = Message.obtain(null, (Integer) tc.get(1));
        if (tc.size() > 2) {
            msg.setData(b);
        }
        try {
            // NOTE(review): NPEs if called before the service is bound
            // (mService == null) -- callers must bind first.
            mService.send(msg);
        } catch (RemoteException e) {
            e.printStackTrace();
        }
    }

    /** Requests the service set the capture frame rate. */
    public void setFrameRate(int val) {
        TouchCommandParams tc = new TouchCommandParams();
        tc.setCommand(CommandType.mSetFrameRate);
        tc.addParam(Integer.valueOf(val));
        sendCommandDownstream(tc);
    }

    /** Requests the current frame rate; result arrives via the listener. */
    public void getFrameRate() {
        TouchCommandParams tc = new TouchCommandParams();
        tc.setCommand(CommandType.mGetFrameRate);
        sendCommandDownstream(tc);
    }

    public void setResolution(int val) {
        TouchCommandParams tc = new TouchCommandParams();
        tc.setCommand(CommandType.mSetResolution);
        tc.addParam(Integer.valueOf(val));
        sendCommandDownstream(tc);
    }

    public void getResolution() {
        TouchCommandParams tc = new TouchCommandParams();
        tc.setCommand(CommandType.mGetResolution);
        sendCommandDownstream(tc);
    }

    public void setSensorMode(int val) {
        TouchCommandParams tc = new TouchCommandParams();
        tc.setCommand(CommandType.mSetSensorMode);
        tc.addParam(Integer.valueOf(val));
        sendCommandDownstream(tc);
    }

    public void getSensorMode() {
        TouchCommandParams tc = new TouchCommandParams();
        tc.setCommand(CommandType.mGetSensorMode);
        sendCommandDownstream(tc);
    }

    public void setGestureMap(BitSet bs) {
        TouchCommandParams tc = new TouchCommandParams();
        tc.setCommand(CommandType.mSetGestureMap);
        tc.addParam(bs);
        sendCommandDownstream(tc);
    }

    public void getGestureMap() {
        TouchCommandParams tc = new TouchCommandParams();
        tc.setCommand(CommandType.mGetGestureMap);
        sendCommandDownstream(tc);
    }

    /**
     * Requests a depth map for the given sub-image, downsampled by the given
     * ratio; bytes are delivered asynchronously via the listener.
     */
    public void getDepthMap(int downSamplingRatio, int SubImageX1,
            int SubImageY1, int SubImageX2, int SubImageY2) {
        Log.d(TAG, "getDepthMap called");
        TouchCommandParams tc = new TouchCommandParams();
        tc.setCommand(CommandType.mGetDepthMap);
        tc.addParam(Integer.valueOf(downSamplingRatio));
        tc.addParam(Integer.valueOf(SubImageX1));
        tc.addParam(Integer.valueOf(SubImageY1));
        tc.addParam(Integer.valueOf(SubImageX2));
        tc.addParam(Integer.valueOf(SubImageY2));
        // Record the request time so the reply handler can log round-trip latency.
        time1 = SystemClock.elapsedRealtime();
        sendCommandDownstream(tc);
    }

    public void getConfidenceImage(int downSamplingRatio, int SubImageX1,
            int SubImageY1, int SubImageX2, int SubImageY2) {
        TouchCommandParams tc = new TouchCommandParams();
        tc.setCommand(CommandType.mGetConfidenceImage);
        tc.addParam(Integer.valueOf(downSamplingRatio));
        tc.addParam(Integer.valueOf(SubImageX1));
        tc.addParam(Integer.valueOf(SubImageY1));
        tc.addParam(Integer.valueOf(SubImageX2));
        tc.addParam(Integer.valueOf(SubImageY2));
        sendCommandDownstream(tc);
    }

    public void getVertexImage(int downSamplingRatio, int SubImageX1,
            int SubImageY1, int SubImageX2, int SubImageY2) {
        TouchCommandParams tc = new TouchCommandParams();
        tc.setCommand(CommandType.mGetVertexImage);
        tc.addParam(Integer.valueOf(downSamplingRatio));
        tc.addParam(Integer.valueOf(SubImageX1));
        tc.addParam(Integer.valueOf(SubImageY1));
        tc.addParam(Integer.valueOf(SubImageX2));
        tc.addParam(Integer.valueOf(SubImageY2));
        sendCommandDownstream(tc);
    }

    /** Command codes; values mirror the MSG_* constants above. */
    class CommandType {
        static final int mGetFrameRate = 1;
        static final int mSetFrameRate = 2;
        static final int mGetResolution = 3;
        static final int mSetResolution = 4;
        static final int mGetSensorMode = 5;
        static final int mSetSensorMode = 6;
        static final int mGetDenoising = 7;
        static final int mSetDenoising = 8;
        static final int mGetGestureMap = 9;
        static final int mSetGestureMap = 10;
        static final int mGetDepthMap = 11;
        static final int mGetConfidenceImage = 12;
        static final int mGetVertexImage = 13;
    }

    /**
     * Wire format for a command: element 0 is the magic marker, element 1 the
     * command code, remaining elements the parameters.
     */
    class TouchCommandParams extends ArrayList<Object> {
        private static final long serialVersionUID = 1L;
        boolean Repeat = false;
        int Command;
        int MagicMarker = 0xabcd;

        TouchCommandParams() {
            this.add(MagicMarker);
        }

        void setCommand(int cmd) {
            this.Command = cmd;
            this.add(Integer.valueOf(cmd));
        }

        int getCommand() {
            return this.Command;
        }

        void addParam(Object params) {
            this.add(params);
        }
    }
}
| |
package edu.stanford.nlp.trees.international.negra;
import java.util.HashMap;
import edu.stanford.nlp.trees.AbstractCollinsHeadFinder;
import edu.stanford.nlp.trees.HeadFinder;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.TreebankLanguagePack;
/**
* HeadFinder for the Negra Treebank. Adapted from
* CollinsHeadFinder.
*
* @author Roger Levy
*/
/**
 * HeadFinder for the Negra Treebank.  Adapted from CollinsHeadFinder.
 *
 * <p>Head rules are stored in {@code nonTerminalInfo}: for each mother
 * category, an ordered list of direction + daughter-category preferences.
 *
 * @author Roger Levy
 */
public class NegraHeadFinder extends AbstractCollinsHeadFinder {

    private static final long serialVersionUID = -7253035927065152766L;

    private static final boolean DEBUG = false;

    /** Vends a "semantic" NegraHeadFinder---one that disprefers modal/auxiliary verbs as the heads of S or VP.
     *
     * @return a NegraHeadFinder that uses a "semantic" head-finding rule for the S category.
     */
    public static HeadFinder negraSemanticHeadFinder() {
        NegraHeadFinder result = new NegraHeadFinder();
        // Use result.right consistently (the original mixed it with the literal
        // "right"; the two are identical since coordSwitch is always false).
        result.nonTerminalInfo.put("S", new String[][]{{result.right, "VVFIN", "VVIMP"}, {result.right, "VP", "CVP"}, {result.right, "VMFIN", "VAFIN", "VAIMP"}, {result.right, "S", "CS"}});
        result.nonTerminalInfo.put("VP", new String[][]{{result.right, "VVINF", "VVIZU", "VVPP"}, {result.right, "VZ", "VAINF", "VMINF", "VMPP", "VAPP", "PP"}});
        result.nonTerminalInfo.put("VZ", new String[][]{{result.right, "VVINF", "VAINF", "VMINF", "VVFIN", "VVIZU"}}); // note that VZ < VVIZU is very rare, maybe shouldn't even exist.
        return result;
    }

    private boolean coordSwitch = false;

    public NegraHeadFinder() {
        this(new NegraPennLanguagePack());
    }

    // Direction markers used in the rule tables; with coordSwitch == false
    // these are simply "left" and "right".
    String left;
    String right;

    public NegraHeadFinder(TreebankLanguagePack tlp) {
        super(tlp);
        nonTerminalInfo = new HashMap<String, String[][]>();
        left = (coordSwitch ? "right" : "left");
        right = (coordSwitch ? "left" : "right");
        /* BEGIN ROGER TODO */
        //
        // // some special rule for S
        // if(motherCat.equals("S") && kids[0].label().value().equals("PRELS"))
        //return kids[0];
        //
        // NOTE(review): this "S" entry is overwritten by the later "S" put
        // below and so has no effect -- confirm whether the PRELS rule was
        // meant to be merged into that one.
        nonTerminalInfo.put("S", new String[][]{{left, "PRELS"}});
        /* END ROGER TODO */
        // these are first-cut rules
        // there are non-unary nodes I put in
        nonTerminalInfo.put("NUR", new String[][]{{left, "S"}});
        // root -- yuk
        nonTerminalInfo.put("ROOT", new String[][]{{left, "S", "CS", "VP", "CVP", "NP", "XY", "CNP", "DL", "AVP", "CAVP", "PN", "AP", "PP", "CO", "NN", "NE", "CPP", "CARD", "CH"}});
        // in case a user's treebank has TOP instead of ROOT or unlabeled
        nonTerminalInfo.put("TOP", new String[][]{{left, "S", "CS", "VP", "CVP", "NP", "XY", "CNP", "DL", "AVP", "CAVP", "PN", "AP", "PP", "CO", "NN", "NE", "CPP", "CARD", "CH"}});
        // Major syntactic categories -- in order appearing in negra.export
        nonTerminalInfo.put("NP", new String[][]{{right, "NN", "NE", "MPN", "NP", "CNP", "PN", "CAR"}}); // Basic heads are NN/NE/NP; CNP is coordination; CAR is cardinal
        nonTerminalInfo.put("AP", new String[][]{{right, "ADJD", "ADJA", "CAP", "AA", "ADV"}}); // there is one ADJP unary rewrite to AD but otherwise all have JJ or ADJP
        nonTerminalInfo.put("PP", new String[][]{{left, "KOKOM", "APPR", "PROAV"}});
        //nonTerminalInfo.put("S", new String[][] {{right, "S","CS","NP"}}); //Most of the time, S has its head explicitly marked. CS is coordinated sentence. I don't fully understand the rest of "non-headed" german sentences to say much.
        nonTerminalInfo.put("S", new String[][]{{right, "VMFIN", "VVFIN", "VAFIN", "VVIMP", "VAIMP"}, {right, "VP", "CVP"}, {right, "S", "CS"}}); // let finite verbs (including imperatives) be head always.
        nonTerminalInfo.put("VP", new String[][]{{right, "VZ", "VAINF", "VMINF", "VVINF", "VVIZU", "VVPP", "VMPP", "VAPP", "PP"}}); // VP usually has explicit head marking; there's lots of garbage here to sort out, though.
        nonTerminalInfo.put("VZ", new String[][]{{left, "PRTZU", "APPR", "PTKZU"}}); // we could also try using the verb (on the right) instead of ZU as the head, maybe this would make more sense...
        nonTerminalInfo.put("CO", new String[][]{{left}}); // this is an unlike coordination
        nonTerminalInfo.put("AVP", new String[][]{{right, "ADV", "AVP", "ADJD", "PROAV", "PP"}});
        nonTerminalInfo.put("AA", new String[][]{{right, "ADJD", "ADJA"}}); // superlative adjective phrase with "am"; I'm using the adjective not the "am" marker
        nonTerminalInfo.put("CNP", new String[][]{{right, "NN", "NE", "MPN", "NP", "CNP", "PN", "CAR"}});
        nonTerminalInfo.put("CAP", new String[][]{{right, "ADJD", "ADJA", "CAP", "AA", "ADV"}});
        nonTerminalInfo.put("CPP", new String[][]{{right, "APPR", "PROAV", "PP", "CPP"}});
        nonTerminalInfo.put("CS", new String[][]{{right, "S", "CS"}});
        nonTerminalInfo.put("CVP", new String[][]{{right, "VP", "CVP"}}); // covers all examples
        nonTerminalInfo.put("CVZ", new String[][]{{right, "VZ"}}); // covers all examples
        nonTerminalInfo.put("CAVP", new String[][]{{right, "ADV", "AVP", "ADJD", "PWAV", "APPR", "PTKVZ"}});
        nonTerminalInfo.put("MPN", new String[][]{{right, "NE", "FM", "CARD"}}); //presumably left/right doesn't matter
        nonTerminalInfo.put("NM", new String[][]{{right, "CARD", "NN"}}); // covers all examples
        nonTerminalInfo.put("CAC", new String[][]{{right, "APPR", "AVP"}}); //covers all examples
        nonTerminalInfo.put("CH", new String[][]{{right}});
        nonTerminalInfo.put("MTA", new String[][]{{right, "ADJA", "ADJD", "NN"}});
        nonTerminalInfo.put("CCP", new String[][]{{right, "AVP"}});
        nonTerminalInfo.put("DL", new String[][]{{left}}); // don't understand this one yet
        nonTerminalInfo.put("ISU", new String[][]{{right}}); // idioms, I think
        nonTerminalInfo.put("QL", new String[][]{{right}}); // these are all complicated numerical expressions I think
        nonTerminalInfo.put("--", new String[][]{{right, "PP"}}); // a garbage conjoined phrase appearing once
        // some POS tags apparently sit where phrases are supposed to be
        nonTerminalInfo.put("CD", new String[][]{{right, "CD"}});
        nonTerminalInfo.put("NN", new String[][]{{right, "NN"}});
        nonTerminalInfo.put("NR", new String[][]{{right, "NR"}});
    }

    /* Some Negra local trees have an explicitly marked head. Use it if
     * possible. */
    protected Tree findMarkedHead(Tree[] kids) {
        for (int i = 0, n = kids.length; i < n; i++) {
            if (kids[i].label() instanceof NegraLabel && ((NegraLabel) kids[i].label()).getEdge() != null && ((NegraLabel) kids[i].label()).getEdge().equals("HD")) {
                //System.err.println("found manually-labeled head");
                return kids[i];
            }
        }
        return null;
    }

    //Taken from AbstractTreebankLanguage pack b/c we have a slightly different definition of
    //basic category for head finding - we strip grammatical function tags.
    public String basicCategory(String category) {
        if (category == null) {
            return null;
        }
        return category.substring(0, postBasicCategoryIndex(category));
    }

    /**
     * Returns the index of the first annotation-introducing character, i.e.
     * the length of the basic category prefix.  A delimiter appearing at
     * position 0 only terminates the category when it appears a second time
     * (handles categories whose name starts with a delimiter character).
     */
    private int postBasicCategoryIndex(String category) {
        boolean sawAtZero = false;
        char seenAtZero = '\u0000';
        int i = 0;
        for (int leng = category.length(); i < leng; i++) {
            char ch = category.charAt(i);
            if (isLabelAnnotationIntroducingCharacter(ch)) {
                if (i == 0) {
                    sawAtZero = true;
                    seenAtZero = ch;
                } else if (sawAtZero && ch == seenAtZero) {
                    sawAtZero = false;
                } else {
                    break;
                }
            }
        }
        return i;
    }

    /**
     * Say whether this character is an annotation introducing
     * character.
     *
     * @param ch The character to check
     * @return Whether it is an annotation introducing character
     */
    public boolean isLabelAnnotationIntroducingCharacter(char ch) {
        char[] cutChars = tlp.labelAnnotationIntroducingCharacters();
        for (char cutChar : cutChars) {
            if (ch == cutChar) {
                return true;
            }
        }
        //for heads, there's one more char we want to check because we don't care about grammatical fns
        if (ch == '-') {
            return true;
        }
        return false;
    }

    /** Called by determineHead and may be overridden in subclasses
     * if special treatment is necessary for particular categories.
     */
    protected Tree determineNonTrivialHead(Tree t, Tree parent) {
        Tree theHead = null;
        String motherCat = basicCategory(t.label().value());
        if (DEBUG) {
            System.err.println("Looking for head of " + t.label() +
                    "; value is |" + t.label().value() + "|, " +
                    " baseCat is |" + motherCat + "|");
        }
        // We know we have nonterminals underneath
        // (a bit of a Penn Treebank assumption, but).
        // Look at label.
        String[][] how = nonTerminalInfo.get(motherCat);
        if (how == null) {
            if (DEBUG) {
                System.err.println("Warning: No rule found for " + motherCat +
                        " (first char: " + motherCat.charAt(0) + ")");
                System.err.println("Known nonterms are: " + nonTerminalInfo.keySet());
            }
            if (defaultRule != null) {
                if (DEBUG) {
                    System.err.println("  Using defaultRule");
                }
                return traverseLocate(t.children(), defaultRule, true);
            } else {
                return null;
            }
        }
        // Try each rule in order; the last rule acts as the default.
        for (int i = 0; i < how.length; i++) {
            boolean deflt = (i == how.length - 1);
            theHead = traverseLocate(t.children(), how[i], deflt);
            if (theHead != null) {
                break;
            }
        }
        if (DEBUG) {
            // Guard against NPE: traverseLocate can return null even on the last rule.
            System.err.println(" Chose " + (theHead == null ? "null" : theHead.label()));
        }
        return theHead;
    }
}
| |
package com.planet_ink.coffee_mud.Races;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2002-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * The Scorpion race: a sneaky, poison-resistant arachnid with a natural
 * stinger weapon.  Available only through the skill-only mask in fantasy
 * themes (not a player-selectable race).
 */
public class Scorpion extends StdRace
{
	@Override
	public String ID()
	{
		return "Scorpion";
	}

	private final static String localizedStaticName = CMLib.lang().L("Scorpion");

	@Override
	public String name()
	{
		return localizedStaticName;
	}

	@Override
	public int shortestMale()
	{
		return 4;
	}

	@Override
	public int shortestFemale()
	{
		return 4;
	}

	@Override
	public int heightVariance()
	{
		return 2;
	}

	@Override
	public int lightestWeight()
	{
		return 5;
	}

	@Override
	public int weightVariance()
	{
		return 5;
	}

	/** Scorpions cannot wear equipment on any body location. */
	@Override
	public long forbiddenWornBits()
	{
		return Integer.MAX_VALUE;
	}

	private final static String localizedStaticRacialCat = CMLib.lang().L("Arachnid");

	@Override
	public String racialCategory()
	{
		return localizedStaticRacialCat;
	}

	// Innate racial abilities; parallel arrays consumed by the accessors below.
	private final String[] racialAbilityNames = { "Poison_Heartstopper" };
	private final int[] racialAbilityLevels = { 1 };
	private final int[] racialAbilityProficiencies = { 10 };
	private final boolean[] racialAbilityQuals = { false };
	private final String[] racialAbilityParms = { "" };

	@Override
	public String[] racialAbilityNames()
	{
		return racialAbilityNames;
	}

	@Override
	public int[] racialAbilityLevels()
	{
		return racialAbilityLevels;
	}

	@Override
	public int[] racialAbilityProficiencies()
	{
		return racialAbilityProficiencies;
	}

	@Override
	public boolean[] racialAbilityQuals()
	{
		return racialAbilityQuals;
	}

	@Override
	public String[] racialAbilityParms()
	{
		return racialAbilityParms;
	}

	//                                an ey ea he ne ar ha to le fo no gi mo wa ta wi
	private static final int[] parts={0 ,2 ,0 ,1 ,0 ,2 ,2 ,1 ,8 ,8 ,0 ,0 ,1 ,0 ,1 ,0 };

	@Override
	public int[] bodyMask()
	{
		return parts;
	}

	private final int[] agingChart = { 0, 0, 0, 1, 1, 1, 1, 2, 2 };

	@Override
	public int[] getAgingChart()
	{
		return agingChart;
	}

	// Lazily populated and shared by all Scorpion instances; see myResources().
	private static Vector<RawMaterial> resources = new Vector<RawMaterial>();

	@Override
	public int availabilityCode()
	{
		return Area.THEME_FANTASY | Area.THEME_SKILLONLYMASK;
	}

	@Override
	public void affectPhyStats(final Physical affected, final PhyStats affectableStats)
	{
		super.affectPhyStats(affected,affectableStats);
		// Scorpions are always sneaking.
		affectableStats.setDisposition(affectableStats.disposition()|PhyStats.IS_SNEAKING);
	}

	@Override
	public void affectCharStats(final MOB affectedMOB, final CharStats affectableStats)
	{
		super.affectCharStats(affectedMOB, affectableStats);
		affectableStats.setRacialStat(CharStats.STAT_STRENGTH,5);
		affectableStats.setRacialStat(CharStats.STAT_DEXTERITY,5);
		affectableStats.setRacialStat(CharStats.STAT_INTELLIGENCE,1);
		// Strong innate resistance to poison.
		affectableStats.setStat(CharStats.STAT_SAVE_POISON,affectableStats.getStat(CharStats.STAT_SAVE_POISON)+100);
	}

	@Override
	public void unaffectCharStats(final MOB affectedMOB, final CharStats affectableStats)
	{
		super.unaffectCharStats(affectedMOB, affectableStats);
		// Restore the base stats adjusted in affectCharStats, and back out the poison-save bonus.
		affectableStats.setStat(CharStats.STAT_STRENGTH,affectedMOB.baseCharStats().getStat(CharStats.STAT_STRENGTH));
		affectableStats.setStat(CharStats.STAT_MAX_STRENGTH_ADJ,affectedMOB.baseCharStats().getStat(CharStats.STAT_MAX_STRENGTH_ADJ));
		affectableStats.setStat(CharStats.STAT_DEXTERITY,affectedMOB.baseCharStats().getStat(CharStats.STAT_DEXTERITY));
		affectableStats.setStat(CharStats.STAT_MAX_DEXTERITY_ADJ,affectedMOB.baseCharStats().getStat(CharStats.STAT_MAX_DEXTERITY_ADJ));
		affectableStats.setStat(CharStats.STAT_INTELLIGENCE,affectedMOB.baseCharStats().getStat(CharStats.STAT_INTELLIGENCE));
		affectableStats.setStat(CharStats.STAT_MAX_INTELLIGENCE_ADJ,affectedMOB.baseCharStats().getStat(CharStats.STAT_MAX_INTELLIGENCE_ADJ));
		affectableStats.setStat(CharStats.STAT_SAVE_POISON,affectableStats.getStat(CharStats.STAT_SAVE_POISON)-100);
	}

	@Override
	public String arriveStr()
	{
		return "creeps in";
	}

	@Override
	public String leaveStr()
	{
		return "creeps";
	}

	/** Lazily builds the scorpion's natural stinger weapon. */
	@Override
	public Weapon myNaturalWeapon()
	{
		if(naturalWeapon==null)
		{
			naturalWeapon=CMClass.getWeapon("StdWeapon");
			naturalWeapon.setName(L("a nasty stinger"));
			naturalWeapon.setMaterial(RawMaterial.RESOURCE_BONE);
			naturalWeapon.setUsesRemaining(1000);
			naturalWeapon.setWeaponDamageType(Weapon.TYPE_PIERCING);
		}
		return naturalWeapon;
	}

	/** Harvestable corpse resources, built once and shared across instances. */
	@Override
	public List<RawMaterial> myResources()
	{
		synchronized(resources)
		{
			if(resources.isEmpty())
			{
				resources.addElement(makeResource
					(L("some @x1 pincers",name().toLowerCase()),RawMaterial.RESOURCE_BONE));
			}
		}
		return resources;
	}

	@Override
	public String makeMobName(final char gender, final int age)
	{
		switch(age)
		{
		case Race.AGE_INFANT:
		case Race.AGE_TODDLER:
		case Race.AGE_CHILD:
			return "baby "+name().toLowerCase();
		default :
			// Adults are always genderless ('N' = neuter).
			return super.makeMobName('N', age);
		}
	}
}
| |
/*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.synapse.rest;
import org.apache.http.protocol.HTTP;
import org.apache.synapse.MessageContext;
import org.apache.synapse.config.SynapseConfiguration;
import org.apache.synapse.rest.version.DefaultStrategy;
import org.apache.synapse.rest.version.URLBasedVersionStrategy;
public class APIDispatcherTest extends RESTMediationTestCase {
private static final String TEST_API = "TestAPI";
private static final String TEST_API_VERSION = "1.0.0";
/** An API anchored at the root context ("/") should match every request path. */
public void testGeneralAPIDispatch() throws Exception {
API api = new API(TEST_API, "/");
SynapseConfiguration synapseConfig = new SynapseConfiguration();
synapseConfig.addAPI(TEST_API, api);
RESTRequestHandler handler = new RESTRequestHandler();
MessageContext synCtx = getMessageContext(synapseConfig, false, "/test", "GET");
handler.process(synCtx);
assertEquals(TEST_API, synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
// The bare root path matches too
synCtx = getMessageContext(synapseConfig, false, "/", "GET");
handler.process(synCtx);
assertEquals(TEST_API, synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
// As does any nested path with query parameters
synCtx = getMessageContext(synapseConfig, false, "/foo/bar?a=5", "GET");
handler.process(synCtx);
assertEquals(TEST_API, synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
}
/** Only requests whose path begins with the API's "/test" context are dispatched to it. */
public void testBasicAPIDispatch() throws Exception {
API api = new API(TEST_API, "/test");
SynapseConfiguration synapseConfig = new SynapseConfiguration();
synapseConfig.addAPI(TEST_API, api);
RESTRequestHandler handler = new RESTRequestHandler();
// Messages with '/test' context should be dispatched
MessageContext synCtx = getMessageContext(synapseConfig, false, "/test", "GET");
handler.process(synCtx);
assertEquals(TEST_API, synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
synCtx = getMessageContext(synapseConfig, false, "/test/", "GET");
handler.process(synCtx);
assertEquals(TEST_API, synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
synCtx = getMessageContext(synapseConfig, false, "/test/foo/bar?a=5", "GET");
handler.process(synCtx);
assertEquals(TEST_API, synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
synCtx = getMessageContext(synapseConfig, false, "/test?a=5", "GET");
handler.process(synCtx);
assertEquals(TEST_API, synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
// Messages WITHOUT the '/test' context should NOT be dispatched
// ('/test' appearing mid-path, or as a prefix of a longer segment, does not count)
synCtx = getMessageContext(synapseConfig, false, "/foo/test/bar?a=5", "GET");
handler.process(synCtx);
assertNull(synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
synCtx = getMessageContext(synapseConfig, false, "/test1/bar?a=5", "GET");
handler.process(synCtx);
assertNull(synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
}
/**
 * A response message is only handled when the request phase has tagged the
 * message context with the owning API's name.
 */
public void testResponseDispatch() throws Exception {
API api = new API(TEST_API, "/test");
SynapseConfiguration synapseConfig = new SynapseConfiguration();
synapseConfig.addAPI(TEST_API, api);
RESTRequestHandler handler = new RESTRequestHandler();
// A response without the API property set should not be dispatched
MessageContext synCtx = getMessageContext(synapseConfig, false, "/test", "GET");
synCtx.setResponse(true);
assertFalse(handler.process(synCtx));
// Once the API property is present, the response is handled
synCtx.setProperty(RESTConstants.SYNAPSE_REST_API, TEST_API);
assertTrue(handler.process(synCtx));
}
/**
 * When an API declares a host (and optionally a port), only requests whose
 * Host header matches are dispatched; default HTTP/HTTPS ports are inferred.
 */
public void testHostBasedAPIDispatch() throws Exception {
API api = new API(TEST_API, "/test");
api.setHost("synapse.apache.org");
SynapseConfiguration synapseConfig = new SynapseConfiguration();
synapseConfig.addAPI(TEST_API, api);
RESTRequestHandler handler = new RESTRequestHandler();
// Messages that don't have the proper host set should not be dispatched
MessageContext synCtx = getMessageContext(synapseConfig, false, "/test", "GET");
handler.process(synCtx);
assertNull(synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
// Messages with the correct host should be dispatched
synCtx = getMessageContext(synapseConfig, false, "/test/", "GET");
addHttpHeader(HTTP.TARGET_HOST, "synapse.apache.org", synCtx);
handler.process(synCtx);
assertEquals(TEST_API, synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
// API should be able to infer the default HTTP port (80) from a portless Host header
api.setPort(80);
handler.process(synCtx);
assertEquals(TEST_API, synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
// Messages with an incorrect port number should not be dispatched
synCtx = getMessageContext(synapseConfig, false, "/test/foo/bar?a=5", "GET");
addHttpHeader(HTTP.TARGET_HOST, "synapse.apache.org:8280", synCtx);
handler.process(synCtx);
assertNull(synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
// Messages with the correct port number should be dispatched
api.setPort(8280);
handler.process(synCtx);
assertEquals(TEST_API, synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
// Plain HTTP request to an API expecting the HTTPS port should not match
api.setPort(443);
synCtx = getMessageContext(synapseConfig, false, "/test/foo/bar?a=5", "GET");
addHttpHeader(HTTP.TARGET_HOST, "synapse.apache.org", synCtx);
handler.process(synCtx);
assertNull(synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
// API should accurately infer the default HTTPS port (443) on a secure transport
synCtx = getMessageContext(synapseConfig, true, "/test/foo/bar?a=5", "GET");
addHttpHeader(HTTP.TARGET_HOST, "synapse.apache.org", synCtx);
handler.process(synCtx);
assertEquals(TEST_API, synCtx.getProperty(RESTConstants.SYNAPSE_REST_API));
}
/**
 * Verifies that a request is routed to the correct API when several APIs
 * with different contexts (one additionally host-bound) are deployed.
 */
public void testMultipleAPIDispatch() throws Exception {
    String name1 = "TestAPI1";
    String name2 = "TestAPI2";
    String name3 = "TestAPI3";
    API firstApi = new API(name1, "/test");
    API secondApi = new API(name2, "/dictionary");
    secondApi.setHost("synapse.apache.org");
    API thirdApi = new API(name3, "/foo/bar");

    SynapseConfiguration config = new SynapseConfiguration();
    config.addAPI(name1, firstApi);
    config.addAPI(name2, secondApi);
    config.addAPI(name3, thirdApi);
    RESTRequestHandler restHandler = new RESTRequestHandler();

    // '/test' resolves to the first API.
    MessageContext messageContext = getMessageContext(config, false, "/test", "GET");
    restHandler.process(messageContext);
    assertEquals(name1, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));

    // '/dictionary' with the matching host resolves to the second API.
    messageContext = getMessageContext(config, false, "/dictionary/c/cat", "GET");
    addHttpHeader(HTTP.TARGET_HOST, "synapse.apache.org", messageContext);
    restHandler.process(messageContext);
    assertEquals(name2, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));

    // '/foo/bar' resolves to the third API.
    messageContext = getMessageContext(config, false, "/foo/bar/index.jsp?user=test", "GET");
    restHandler.process(messageContext);
    assertEquals(name3, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));

    // '/foo' alone matches none of the deployed contexts.
    messageContext = getMessageContext(config, false, "/foo/index.jsp?user=test", "GET");
    restHandler.process(messageContext);
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
}
/**
 * Verifies dispatching with the default version strategy: matching
 * requests carry an empty version property, and non-matching requests
 * leave both REST properties unset.
 */
public void testAPIDefaultVersionBasedDispatch() throws Exception {
    API api = new API(TEST_API, "/test");
    api.setVersionStrategy(new DefaultStrategy(api));
    SynapseConfiguration config = new SynapseConfiguration();
    config.addAPI(api.getName(), api);
    RESTRequestHandler restHandler = new RESTRequestHandler();

    // Messages with the '/test' context should be dispatched.
    MessageContext messageContext = getMessageContext(config, false, "/test", "GET");
    restHandler.process(messageContext);
    assertEquals(TEST_API, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertEquals("", messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    messageContext = getMessageContext(config, false, "/test/", "GET");
    restHandler.process(messageContext);
    assertEquals(TEST_API, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertEquals("", messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    messageContext = getMessageContext(config, false, "/test/foo/bar?a=5", "GET");
    restHandler.process(messageContext);
    assertEquals(TEST_API, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertEquals("", messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    messageContext = getMessageContext(config, false, "/test?a=5", "GET");
    restHandler.process(messageContext);
    assertEquals(TEST_API, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertEquals("", messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    // Messages WITHOUT the '/test' context should NOT be dispatched.
    messageContext = getMessageContext(config, false, "/foo/test/bar?a=5", "GET");
    restHandler.process(messageContext);
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    messageContext = getMessageContext(config, false, "/test1/bar?a=5", "GET");
    restHandler.process(messageContext);
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));
}
/**
 * Verifies dispatching with the URL-based version strategy: only URLs
 * containing the exact version segment after the context are dispatched,
 * and the version property is populated on a match.
 */
public void testAPIURLVersionBasedDispatch() throws Exception {
    API api = new API(TEST_API, "/test");
    api.setVersionStrategy(new URLBasedVersionStrategy(api, TEST_API_VERSION, null));
    SynapseConfiguration config = new SynapseConfiguration();
    config.addAPI(api.getName(), api);
    RESTRequestHandler restHandler = new RESTRequestHandler();

    // The bare '/test' context without a version segment must NOT be dispatched.
    MessageContext messageContext = getMessageContext(config, false, "/test/", "GET");
    restHandler.process(messageContext);
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    // URLs carrying the exact version segment are dispatched.
    messageContext = getMessageContext(config, false, "/test/1.0.0", "GET");
    restHandler.process(messageContext);
    assertEquals(api.getName(), messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertEquals(TEST_API_VERSION, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    messageContext = getMessageContext(config, false, "/test/1.0.0/", "GET");
    restHandler.process(messageContext);
    assertEquals(api.getName(), messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertEquals(TEST_API_VERSION, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    messageContext = getMessageContext(config, false, "/test/1.0.0/foo/bar?a=5", "GET");
    restHandler.process(messageContext);
    assertEquals(api.getName(), messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertEquals(TEST_API_VERSION, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    messageContext = getMessageContext(config, false, "/test/1.0.0?a=5", "GET");
    restHandler.process(messageContext);
    assertEquals(api.getName(), messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertEquals(TEST_API_VERSION, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    // A URL appearing as a query parameter value must not confuse dispatching.
    messageContext = getMessageContext(config, false, "/test/1.0.0?a=http://localhost.com", "GET");
    restHandler.process(messageContext);
    assertEquals(api.getName(), messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertEquals(TEST_API_VERSION, messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    // Messages WITHOUT the '/test' context should NOT be dispatched.
    messageContext = getMessageContext(config, false, "/foo/test/bar?a=5", "GET");
    restHandler.process(messageContext);
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    // Messages with the context but a wrong version segment should NOT be dispatched.
    messageContext = getMessageContext(config, false, "/test/1.0.1/foo/bar?a=5", "GET");
    restHandler.process(messageContext);
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    messageContext = getMessageContext(config, false, "/test/2.0/foo/bar?a=5", "GET");
    restHandler.process(messageContext);
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));

    messageContext = getMessageContext(config, false, "/test/2.0.0.0/foo/bar?a=5", "GET");
    restHandler.process(messageContext);
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API));
    assertNull(messageContext.getProperty(RESTConstants.SYNAPSE_REST_API_VERSION));
}
}
| |
/*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2011 Eric Lafortune (eric@graphics.cornell.edu)
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.shrink;
import proguard.classfile.*;
import proguard.classfile.attribute.*;
import proguard.classfile.attribute.annotation.*;
import proguard.classfile.attribute.preverification.*;
import proguard.classfile.attribute.preverification.visitor.*;
import proguard.classfile.attribute.visitor.*;
import proguard.classfile.constant.*;
import proguard.classfile.constant.visitor.*;
import proguard.classfile.instruction.*;
import proguard.classfile.instruction.visitor.InstructionVisitor;
import proguard.classfile.util.*;
import proguard.classfile.visitor.*;
/**
* This ClassVisitor and MemberVisitor recursively marks all classes and class
* elements that are being used.
*
* @see ClassShrinker
*
* @author Eric Lafortune
*/
class UsageMarker
extends SimplifiedVisitor
implements ClassVisitor,
MemberVisitor,
ConstantVisitor,
AttributeVisitor,
InnerClassesInfoVisitor,
ExceptionInfoVisitor,
StackMapFrameVisitor,
VerificationTypeVisitor,
LocalVariableInfoVisitor,
LocalVariableTypeInfoVisitor,
// AnnotationVisitor,
// ElementValueVisitor,
InstructionVisitor
{
// A visitor info flag to indicate the ProgramMember object is being used,
// if its Clazz can be determined as being used as well.
private static final Object POSSIBLY_USED = new Object();
// A visitor info flag to indicate the visitor accepter is being used.
private static final Object USED = new Object();
// Gives interfaces a preliminary POSSIBLY_USED mark (see MyInterfaceUsageMarker).
private final MyInterfaceUsageMarker interfaceUsageMarker = new MyInterfaceUsageMarker();
// Promotes class members already marked POSSIBLY_USED to USED
// (see MyPossiblyUsedMemberUsageMarker).
private final MyPossiblyUsedMemberUsageMarker possiblyUsedMemberUsageMarker = new MyPossiblyUsedMemberUsageMarker();
// Marks a method only if its code attribute contains more than a single
// instruction (see MyNonEmptyMethodUsageMarker).
private final MemberVisitor nonEmptyMethodUsageMarker = new AllAttributeVisitor(
new MyNonEmptyMethodUsageMarker());
// For String and Class constants, marks the parameterless constructor
// (<init>()) of the referenced class with this UsageMarker —
// presumably to keep reflective instantiation targets alive; not
// exercised in this part of the file, so confirm against call sites.
private final ConstantVisitor parameterlessConstructorMarker = new ConstantTagFilter(new int[] { ClassConstants.CONSTANT_String, ClassConstants.CONSTANT_Class },
new ReferencedClassVisitor(
new NamedMethodVisitor(ClassConstants.INTERNAL_METHOD_NAME_INIT,
ClassConstants.INTERNAL_METHOD_TYPE_INIT,
this)));
// Implementations for ClassVisitor.
/**
 * Marks the given program class as used and then marks everything its
 * body references, unless the class has been marked before.
 */
public void visitProgramClass(ProgramClass programClass)
{
    if (!shouldBeMarkedAsUsed(programClass))
    {
        return;
    }

    markAsUsed(programClass);
    markProgramClassBody(programClass);
}
/**
 * Marks everything the given (already marked) program class depends on:
 * its name and superclass constants, its interfaces (preliminarily),
 * its static initializer, its possibly-used members, and its attributes.
 */
protected void markProgramClassBody(ProgramClass programClass)
{
// Mark this class's name.
markConstant(programClass, programClass.u2thisClass);
// Mark the superclass.
if (programClass.u2superClass != 0)
{
markConstant(programClass, programClass.u2superClass);
}
// Give the interfaces preliminary marks; they are only marked for real
// if they turn out to be required (see MyInterfaceUsageMarker).
programClass.hierarchyAccept(false, false, true, false,
interfaceUsageMarker);
// Explicitly mark the <clinit> method, if it's not empty.
programClass.methodAccept(ClassConstants.INTERNAL_METHOD_NAME_CLINIT,
ClassConstants.INTERNAL_METHOD_TYPE_CLINIT,
nonEmptyMethodUsageMarker);
// Process all class members that have already been marked as possibly used.
programClass.fieldsAccept(possiblyUsedMemberUsageMarker);
programClass.methodsAccept(possiblyUsedMemberUsageMarker);
// Mark the attributes.
programClass.attributesAccept(this);
}
/**
 * Marks the given library class, its superclass, its interfaces, and all
 * of its methods. Library code is not analyzed in detail: once a library
 * class is used, all of its methods are conservatively treated as used
 * (here and in all subclasses).
 */
public void visitLibraryClass(LibraryClass libraryClass)
{
    if (!shouldBeMarkedAsUsed(libraryClass))
    {
        return;
    }

    markAsUsed(libraryClass);

    // Mark the superclass, if any.
    Clazz superClazz = libraryClass.superClass;
    if (superClazz != null)
    {
        superClazz.accept(this);
    }

    // Mark every resolved interface.
    Clazz[] interfaces = libraryClass.interfaceClasses;
    if (interfaces != null)
    {
        for (Clazz interfaceClass : interfaces)
        {
            if (interfaceClass != null)
            {
                interfaceClass.accept(this);
            }
        }
    }

    // Conservatively mark all methods.
    libraryClass.methodsAccept(this);
}
/**
 * This ClassVisitor marks ProgramClass objects as possibly used,
 * and it visits LibraryClass objects with its outer UsageMarker.
 */
private class MyInterfaceUsageMarker
implements ClassVisitor
{
// Program interfaces only get a preliminary mark; they are processed
// for real later, once they are known to be required.
public void visitProgramClass(ProgramClass programClass)
{
if (shouldBeMarkedAsPossiblyUsed(programClass))
{
// We can't process the interface yet, because it might not
// be required. Give it a preliminary mark.
markAsPossiblyUsed(programClass);
}
}
// Library interfaces are marked immediately and in full.
public void visitLibraryClass(LibraryClass libraryClass)
{
// Make sure all library interface methods are marked.
UsageMarker.this.visitLibraryClass(libraryClass);
}
}
/**
 * This MemberVisitor promotes ProgramField and ProgramMethod objects that
 * have already been given a preliminary POSSIBLY_USED mark to USED, and
 * marks everything their bodies reference.
 */
private class MyPossiblyUsedMemberUsageMarker
extends SimplifiedVisitor
implements MemberVisitor
{
    // Implementations for MemberVisitor.

    public void visitProgramField(ProgramClass programClass, ProgramField programField)
    {
        // Has the field already been referenced?
        if (isPossiblyUsed(programField))
        {
            markAsUsed(programField);

            // Mark the name, descriptor, attributes, and referenced
            // classes. Delegating to the outer helper keeps this in sync
            // with directly-used fields (previously this logic was
            // duplicated inline).
            markProgramFieldBody(programClass, programField);
        }
    }


    public void visitProgramMethod(ProgramClass programClass, ProgramMethod programMethod)
    {
        // Has the method already been referenced?
        if (isPossiblyUsed(programMethod))
        {
            markAsUsed(programMethod);

            // Mark the method body.
            markProgramMethodBody(programClass, programMethod);

            // Note that, if the method has been marked as possibly used,
            // the method hierarchy has already been marked (cfr. below).
        }
    }
}
/**
 * This AttributeVisitor marks ProgramMethod objects of non-empty methods.
 */
private class MyNonEmptyMethodUsageMarker
extends SimplifiedVisitor
implements AttributeVisitor
{
// Implementations for AttributeVisitor.
public void visitAnyAttribute(Clazz clazz, Attribute attribute) {}
// Marks the method only if its code is longer than a single byte,
// i.e. more than a lone return instruction.
public void visitCodeAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute)
{
if (codeAttribute.u4codeLength > 1)
{
method.accept(clazz, UsageMarker.this);
}
}
}
// Implementations for MemberVisitor.
/**
 * Marks the given program field as used if its class is already used;
 * otherwise only gives it a preliminary POSSIBLY_USED mark, to be
 * promoted later if the class does get used.
 */
public void visitProgramField(ProgramClass programClass, ProgramField programField)
{
    if (!shouldBeMarkedAsUsed(programField))
    {
        return;
    }

    if (isUsed(programClass))
    {
        // The field's class is used, so the field can be marked and
        // processed right away.
        markAsUsed(programField);
        markProgramFieldBody(programClass, programField);
    }
    else if (shouldBeMarkedAsPossiblyUsed(programField))
    {
        // The class isn't marked as being used (yet), so only give the
        // field a preliminary mark for now.
        markAsPossiblyUsed(programField);
    }
}
/**
 * Marks the given program method as used if its class is already used;
 * otherwise only gives it a preliminary POSSIBLY_USED mark. In both
 * cases the hierarchy of implementing/overriding methods is marked.
 */
public void visitProgramMethod(ProgramClass programClass, ProgramMethod programMethod)
{
    if (!shouldBeMarkedAsUsed(programMethod))
    {
        return;
    }

    if (isUsed(programClass))
    {
        // The method's class is used, so the method can be marked and
        // processed right away.
        markAsUsed(programMethod);
        markProgramMethodBody(programClass, programMethod);

        // Mark the method hierarchy.
        markMethodHierarchy(programClass, programMethod);
    }
    else if (shouldBeMarkedAsPossiblyUsed(programMethod))
    {
        // The class isn't marked as being used (yet), so only give the
        // method a preliminary mark for now.
        markAsPossiblyUsed(programMethod);

        // Mark the method hierarchy.
        markMethodHierarchy(programClass, programMethod);
    }
}
// Intentionally empty: library fields are not marked individually here.
public void visitLibraryField(LibraryClass programClass, LibraryField programField) {}
/**
 * Marks the given library method, along with the hierarchy of
 * implementing or overriding methods, unless it was marked before.
 */
public void visitLibraryMethod(LibraryClass libraryClass, LibraryMethod libraryMethod)
{
    if (!shouldBeMarkedAsUsed(libraryMethod))
    {
        return;
    }

    markAsUsed(libraryMethod);

    // Mark the method hierarchy.
    markMethodHierarchy(libraryClass, libraryMethod);
}
/**
 * Marks everything an (already marked) program field depends on: its
 * name and descriptor constants, its attributes, and the classes
 * referenced from its descriptor.
 */
protected void markProgramFieldBody(ProgramClass programClass, ProgramField programField)
{
// Mark the name and descriptor.
markConstant(programClass, programField.u2nameIndex);
markConstant(programClass, programField.u2descriptorIndex);
// Mark the attributes.
programField.attributesAccept(programClass, this);
// Mark the classes referenced in the descriptor string.
programField.referencedClassesAccept(this);
}
/**
 * Marks everything an (already marked) program method depends on: its
 * name and descriptor constants, its attributes (including the code
 * attribute), and the classes referenced from its descriptor.
 */
protected void markProgramMethodBody(ProgramClass programClass, ProgramMethod programMethod)
{
// Mark the name and descriptor.
markConstant(programClass, programMethod.u2nameIndex);
markConstant(programClass, programMethod.u2descriptorIndex);
// Mark the attributes.
programMethod.attributesAccept(programClass, this);
// Mark the classes referenced in the descriptor string.
programMethod.referencedClassesAccept(this);
}
/**
 * Marks the hierarchy of implementing or overriding methods corresponding
 * to the given method, if any.
 */
protected void markMethodHierarchy(Clazz clazz, Method method)
{
// Private methods, static methods, and initializers can't be overridden,
// so no hierarchy needs to be marked for them.
if ((method.getAccessFlags() &
(ClassConstants.INTERNAL_ACC_PRIVATE |
ClassConstants.INTERNAL_ACC_STATIC)) == 0 &&
!ClassUtil.isInitializer(method.getName(clazz)))
{
// Visit methods with the same name and descriptor in the hierarchies
// of all concrete subclasses, filtered by the MemberAccessFilter to
// those without private/static/abstract access flags, and mark them
// with this UsageMarker.
clazz.accept(new ConcreteClassDownTraveler(
new ClassHierarchyTraveler(true, true, false, true,
new NamedMethodVisitor(method.getName(clazz),
method.getDescriptor(clazz),
new MemberAccessFilter(0, ClassConstants.INTERNAL_ACC_PRIVATE | ClassConstants.INTERNAL_ACC_STATIC | ClassConstants.INTERNAL_ACC_ABSTRACT,
this)))));
}
}
// Implementations for ConstantVisitor.
/**
 * Marks a primitive integer constant. Primitive constants reference no
 * other constant pool entries, so marking the entry itself is all that
 * is needed.
 */
public void visitIntegerConstant(Clazz clazz, IntegerConstant integerConstant)
{
    if (shouldBeMarkedAsUsed(integerConstant)) markAsUsed(integerConstant);
}


/** Marks a primitive long constant (no further references to mark). */
public void visitLongConstant(Clazz clazz, LongConstant longConstant)
{
    if (shouldBeMarkedAsUsed(longConstant)) markAsUsed(longConstant);
}


/** Marks a primitive float constant (no further references to mark). */
public void visitFloatConstant(Clazz clazz, FloatConstant floatConstant)
{
    if (shouldBeMarkedAsUsed(floatConstant)) markAsUsed(floatConstant);
}


/** Marks a primitive double constant (no further references to mark). */
public void visitDoubleConstant(Clazz clazz, DoubleConstant doubleConstant)
{
    if (shouldBeMarkedAsUsed(doubleConstant)) markAsUsed(doubleConstant);
}
/**
 * Marks a string constant, its underlying UTF-8 entry, and any class or
 * class member the string is known to refer to.
 */
public void visitStringConstant(Clazz clazz, StringConstant stringConstant)
{
    if (!shouldBeMarkedAsUsed(stringConstant))
    {
        return;
    }

    markAsUsed(stringConstant);
    markConstant(clazz, stringConstant.u2stringIndex);

    // Mark the referenced class and class member, if any.
    stringConstant.referencedClassAccept(this);
    stringConstant.referencedMemberAccept(this);
}
/**
 * Marks a UTF-8 constant. It carries only its own bytes, so there is
 * nothing else to mark.
 */
public void visitUtf8Constant(Clazz clazz, Utf8Constant utf8Constant)
{
    if (shouldBeMarkedAsUsed(utf8Constant)) markAsUsed(utf8Constant);
}
/**
 * Marks an invokedynamic constant, its name-and-type entry, and the
 * single bootstrap method entry it refers to.
 */
public void visitInvokeDynamicConstant(Clazz clazz, InvokeDynamicConstant invokeDynamicConstant)
{
if (shouldBeMarkedAsUsed(invokeDynamicConstant))
{
markAsUsed(invokeDynamicConstant);
markConstant(clazz, invokeDynamicConstant.u2nameAndTypeIndex);
// Mark the bootstrap methods attribute, restricted to the one entry
// referenced by this constant.
clazz.attributesAccept(new MyBootStrapMethodUsageMarker(invokeDynamicConstant.u2bootstrapMethodAttributeIndex));
}
}
/**
 * Marks a method handle constant and the field/method reference it wraps.
 */
public void visitMethodHandleConstant(Clazz clazz, MethodHandleConstant methodHandleConstant)
{
    if (!shouldBeMarkedAsUsed(methodHandleConstant))
    {
        return;
    }

    markAsUsed(methodHandleConstant);
    markConstant(clazz, methodHandleConstant.u2referenceIndex);
}
/**
 * Marks a field/method reference constant, its class and name-and-type
 * entries, and the referenced class and class member themselves.
 */
public void visitAnyRefConstant(Clazz clazz, RefConstant refConstant)
{
    if (!shouldBeMarkedAsUsed(refConstant))
    {
        return;
    }

    markAsUsed(refConstant);
    markConstant(clazz, refConstant.u2classIndex);
    markConstant(clazz, refConstant.u2nameAndTypeIndex);

    // When compiled with "-target 1.2" or higher, the class or interface
    // that actually contains the referenced member may sit higher up the
    // hierarchy than the named class. Make sure it's marked too, in case
    // it isn't used anywhere else.
    refConstant.referencedClassAccept(this);

    // Mark the referenced class member itself.
    refConstant.referencedMemberAccept(this);
}
/**
 * Marks a class constant, its name entry, and the referenced class.
 */
public void visitClassConstant(Clazz clazz, ClassConstant classConstant)
{
    if (!shouldBeMarkedAsUsed(classConstant))
    {
        return;
    }

    markAsUsed(classConstant);
    markConstant(clazz, classConstant.u2nameIndex);

    // Mark the referenced class itself.
    classConstant.referencedClassAccept(this);
}
/**
 * Marks a method type constant and its descriptor entry.
 */
public void visitMethodTypeConstant(Clazz clazz, MethodTypeConstant methodTypeConstant)
{
    if (!shouldBeMarkedAsUsed(methodTypeConstant))
    {
        return;
    }

    markAsUsed(methodTypeConstant);
    markConstant(clazz, methodTypeConstant.u2descriptorIndex);
}


/**
 * Marks a name-and-type constant together with its name and descriptor
 * entries.
 */
public void visitNameAndTypeConstant(Clazz clazz, NameAndTypeConstant nameAndTypeConstant)
{
    if (!shouldBeMarkedAsUsed(nameAndTypeConstant))
    {
        return;
    }

    markAsUsed(nameAndTypeConstant);
    markConstant(clazz, nameAndTypeConstant.u2nameIndex);
    markConstant(clazz, nameAndTypeConstant.u2descriptorIndex);
}
/**
 * This AttributeVisitor marks the bootstrap methods attributes, their
 * method entries, their method handles, and their arguments.
 */
private class MyBootStrapMethodUsageMarker
extends SimplifiedVisitor
implements AttributeVisitor,
           BootstrapMethodInfoVisitor
{
    // The index of the single bootstrap method entry to mark.
    // (Assigned only in the constructor, hence final.)
    private final int bootstrapMethodIndex;


    private MyBootStrapMethodUsageMarker(int bootstrapMethodIndex)
    {
        this.bootstrapMethodIndex = bootstrapMethodIndex;
    }


    // Implementations for AttributeVisitor.

    public void visitAnyAttribute(Clazz clazz, Attribute attribute) {}


    public void visitBootstrapMethodsAttribute(Clazz clazz, BootstrapMethodsAttribute bootstrapMethodsAttribute)
    {
        if (shouldBeMarkedAsUsed(bootstrapMethodsAttribute))
        {
            markAsUsed(bootstrapMethodsAttribute);

            markConstant(clazz, bootstrapMethodsAttribute.u2attributeNameIndex);

            // Only visit the one entry this marker was created for.
            bootstrapMethodsAttribute.bootstrapMethodEntryAccept(clazz,
                                                                 bootstrapMethodIndex,
                                                                 this);
        }
    }


    // Implementations for BootstrapMethodInfoVisitor.

    public void visitBootstrapMethodInfo(Clazz clazz, BootstrapMethodInfo bootstrapMethodInfo)
    {
        markAsUsed(bootstrapMethodInfo);

        markConstant(clazz, bootstrapMethodInfo.u2methodHandleIndex);

        // Mark the constant pool entries referenced by the arguments.
        bootstrapMethodInfo.methodArgumentsAccept(clazz, UsageMarker.this);
    }
}
// Implementations for AttributeVisitor.
// Note that attributes are typically only referenced once, so we don't
// test if they have been marked already.
/**
 * Marks an unrecognized attribute and its name. The contents can't be
 * interpreted, so conservatively keeping the whole attribute is the best
 * we can do.
 */
public void visitUnknownAttribute(Clazz clazz, UnknownAttribute unknownAttribute)
{
    markAsUsed(unknownAttribute);
    markConstant(clazz, unknownAttribute.u2attributeNameIndex);
}
// Intentionally empty.
public void visitBootstrapMethodsAttribute(Clazz clazz, BootstrapMethodsAttribute bootstrapMethodsAttribute)
{
// Don't mark the attribute and its name here. We may mark it in
// MyBootStrapMethodUsageMarker, when an invokedynamic constant
// refers to one of its entries.
}
// Marks the SourceFile attribute, its name, and the source file string.
public void visitSourceFileAttribute(Clazz clazz, SourceFileAttribute sourceFileAttribute)
{
markAsUsed(sourceFileAttribute);
markConstant(clazz, sourceFileAttribute.u2attributeNameIndex);
markConstant(clazz, sourceFileAttribute.u2sourceFileIndex);
}
// Marks the SourceDir attribute, its name, and the source dir string.
public void visitSourceDirAttribute(Clazz clazz, SourceDirAttribute sourceDirAttribute)
{
markAsUsed(sourceDirAttribute);
markConstant(clazz, sourceDirAttribute.u2attributeNameIndex);
markConstant(clazz, sourceDirAttribute.u2sourceDirIndex);
}
// Visits the inner class entries without marking the attribute itself.
public void visitInnerClassesAttribute(Clazz clazz, InnerClassesAttribute innerClassesAttribute)
{
// Don't mark the attribute and its name yet. We may mark it later, in
// InnerUsageMarker.
//markAsUsed(innerClassesAttribute);
//markConstant(clazz, innerClassesAttribute.u2attrNameIndex);
// Do mark the outer class entries.
innerClassesAttribute.innerClassEntriesAccept(clazz, this);
}
// Marks the EnclosingMethod attribute, its name, the enclosing class,
// and (when present) the enclosing method's name-and-type entry.
public void visitEnclosingMethodAttribute(Clazz clazz, EnclosingMethodAttribute enclosingMethodAttribute)
{
markAsUsed(enclosingMethodAttribute);
markConstant(clazz, enclosingMethodAttribute.u2attributeNameIndex);
markConstant(clazz, enclosingMethodAttribute.u2classIndex);
// The name-and-type entry is optional (0 when the class is not
// enclosed in a method).
if (enclosingMethodAttribute.u2nameAndTypeIndex != 0)
{
markConstant(clazz, enclosingMethodAttribute.u2nameAndTypeIndex);
}
}
// Marks the Deprecated attribute and its name.
public void visitDeprecatedAttribute(Clazz clazz, DeprecatedAttribute deprecatedAttribute)
{
markAsUsed(deprecatedAttribute);
markConstant(clazz, deprecatedAttribute.u2attributeNameIndex);
}
// Marks the Synthetic attribute and its name.
public void visitSyntheticAttribute(Clazz clazz, SyntheticAttribute syntheticAttribute)
{
markAsUsed(syntheticAttribute);
markConstant(clazz, syntheticAttribute.u2attributeNameIndex);
}
// Marks the Signature attribute, its name, and the signature string.
public void visitSignatureAttribute(Clazz clazz, SignatureAttribute signatureAttribute)
{
markAsUsed(signatureAttribute);
markConstant(clazz, signatureAttribute.u2attributeNameIndex);
markConstant(clazz, signatureAttribute.u2signatureIndex);
}
// Marks the ConstantValue attribute, its name, and the constant it holds.
public void visitConstantValueAttribute(Clazz clazz, Field field, ConstantValueAttribute constantValueAttribute)
{
markAsUsed(constantValueAttribute);
markConstant(clazz, constantValueAttribute.u2attributeNameIndex);
markConstant(clazz, constantValueAttribute.u2constantValueIndex);
}
// Marks the Exceptions attribute, its name, and each declared exception
// class constant.
public void visitExceptionsAttribute(Clazz clazz, Method method, ExceptionsAttribute exceptionsAttribute)
{
markAsUsed(exceptionsAttribute);
markConstant(clazz, exceptionsAttribute.u2attributeNameIndex);
// Mark the constant pool entries referenced by the exceptions.
exceptionsAttribute.exceptionEntriesAccept((ProgramClass)clazz, this);
}
// Marks the Code attribute, its name, and everything its contents
// reference: instructions, exception handlers, and nested attributes.
public void visitCodeAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute)
{
markAsUsed(codeAttribute);
markConstant(clazz, codeAttribute.u2attributeNameIndex);
// Mark the constant pool entries referenced by the instructions,
// by the exceptions, and by the attributes.
codeAttribute.instructionsAccept(clazz, method, this);
codeAttribute.exceptionsAccept(clazz, method, this);
codeAttribute.attributesAccept(clazz, method, this);
}
// Marks the (pre-Java-6) StackMap attribute, its name, and its frames.
public void visitStackMapAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute, StackMapAttribute stackMapAttribute)
{
markAsUsed(stackMapAttribute);
markConstant(clazz, stackMapAttribute.u2attributeNameIndex);
// Mark the constant pool entries referenced by the stack map frames.
stackMapAttribute.stackMapFramesAccept(clazz, method, codeAttribute, this);
}
// Marks the StackMapTable attribute, its name, and its frames.
public void visitStackMapTableAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute, StackMapTableAttribute stackMapTableAttribute)
{
markAsUsed(stackMapTableAttribute);
markConstant(clazz, stackMapTableAttribute.u2attributeNameIndex);
// Mark the constant pool entries referenced by the stack map frames.
stackMapTableAttribute.stackMapFramesAccept(clazz, method, codeAttribute, this);
}
// Marks the LineNumberTable attribute and its name; the table entries
// themselves reference no constant pool entries.
public void visitLineNumberTableAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute, LineNumberTableAttribute lineNumberTableAttribute)
{
markAsUsed(lineNumberTableAttribute);
markConstant(clazz, lineNumberTableAttribute.u2attributeNameIndex);
}
// Marks the LocalVariableTable attribute, its name, and the constants
// referenced by each local variable entry.
public void visitLocalVariableTableAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute, LocalVariableTableAttribute localVariableTableAttribute)
{
markAsUsed(localVariableTableAttribute);
markConstant(clazz, localVariableTableAttribute.u2attributeNameIndex);
// Mark the constant pool entries referenced by the local variables.
localVariableTableAttribute.localVariablesAccept(clazz, method, codeAttribute, this);
}
// Marks the LocalVariableTypeTable attribute, its name, and the constants
// referenced by each local variable type entry.
public void visitLocalVariableTypeTableAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute, LocalVariableTypeTableAttribute localVariableTypeTableAttribute)
{
markAsUsed(localVariableTypeTableAttribute);
markConstant(clazz, localVariableTypeTableAttribute.u2attributeNameIndex);
// Mark the constant pool entries referenced by the local variable types.
localVariableTypeTableAttribute.localVariablesAccept(clazz, method, codeAttribute, this);
}
// Intentionally empty.
public void visitAnyAnnotationsAttribute(Clazz clazz, AnnotationsAttribute annotationsAttribute)
{
// Don't mark the attribute and its contents yet. We may mark them later,
// in AnnotationUsageMarker.
// markAsUsed(annotationsAttribute);
//
// markConstant(clazz, annotationsAttribute.u2attributeNameIndex);
//
// // Mark the constant pool entries referenced by the annotations.
// annotationsAttribute.annotationsAccept(clazz, this);
}
// Intentionally empty.
public void visitAnyParameterAnnotationsAttribute(Clazz clazz, Method method, ParameterAnnotationsAttribute parameterAnnotationsAttribute)
{
// Don't mark the attribute and its contents yet. We may mark them later,
// in AnnotationUsageMarker.
// markAsUsed(parameterAnnotationsAttribute);
//
// markConstant(clazz, parameterAnnotationsAttribute.u2attributeNameIndex);
//
// // Mark the constant pool entries referenced by the annotations.
// parameterAnnotationsAttribute.annotationsAccept(clazz, method, this);
}
// Intentionally empty.
public void visitAnnotationDefaultAttribute(Clazz clazz, Method method, AnnotationDefaultAttribute annotationDefaultAttribute)
{
// Don't mark the attribute and its contents yet. We may mark them later,
// in AnnotationUsageMarker.
// markAsUsed(annotationDefaultAttribute);
//
// markConstant(clazz, annotationDefaultAttribute.u2attributeNameIndex);
//
// // Mark the constant pool entries referenced by the element value.
// annotationDefaultAttribute.defaultValueAccept(clazz, this);
}
// Implementations for ExceptionInfoVisitor.
// Marks an exception handler entry and, when present, its catch type
// class constant (0 means a catch-all handler such as finally).
public void visitExceptionInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, ExceptionInfo exceptionInfo)
{
markAsUsed(exceptionInfo);
if (exceptionInfo.u2catchType != 0)
{
markConstant(clazz, exceptionInfo.u2catchType);
}
}
// Implementations for InnerClassesInfoVisitor.
/**
 * Marks inner class entries in which the visited class itself is the
 * inner class, along with their inner/outer/name constants. Entries for
 * other inner classes may be marked later by InnerUsageMarker.
 */
public void visitInnerClassesInfo(Clazz clazz, InnerClassesInfo innerClassesInfo)
{
    boolean refersToThisClass =
        innerClassesInfo.u2innerClassIndex != 0 &&
        clazz.getName().equals(clazz.getClassName(innerClassesInfo.u2innerClassIndex));

    if (refersToThisClass)
    {
        markAsUsed(innerClassesInfo);
        innerClassesInfo.innerClassConstantAccept(clazz, this);
        innerClassesInfo.outerClassConstantAccept(clazz, this);
        innerClassesInfo.innerNameConstantAccept(clazz, this);
    }
}
// Implementations for StackMapFrameVisitor.
// Most frame types carry no verification types that reference the
// constant pool, so nothing needs to be marked for them.
public void visitAnyStackMapFrame(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, StackMapFrame stackMapFrame) {}
// Visits the single stack item of a same-locals-one-stack-item frame.
public void visitSameOneFrame(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, SameOneFrame sameOneFrame)
{
// Mark the constant pool entries referenced by the verification types.
sameOneFrame.stackItemAccept(clazz, method, codeAttribute, offset, this);
}
// Visits the additional variables of an append frame.
public void visitMoreZeroFrame(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, MoreZeroFrame moreZeroFrame)
{
// Mark the constant pool entries referenced by the verification types.
moreZeroFrame.additionalVariablesAccept(clazz, method, codeAttribute, offset, this);
}
// Visits all variables and stack entries of a full frame.
public void visitFullFrame(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, FullFrame fullFrame)
{
// Mark the constant pool entries referenced by the verification types.
fullFrame.variablesAccept(clazz, method, codeAttribute, offset, this);
fullFrame.stackAccept(clazz, method, codeAttribute, offset, this);
}
// Implementations for VerificationTypeVisitor.
// Only object verification types reference the constant pool; all other
// verification types need no marking.
public void visitAnyVerificationType(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, VerificationType verificationType) {}
// Marks the class constant named by an object verification type.
public void visitObjectType(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, ObjectType objectType)
{
markConstant(clazz, objectType.u2classIndex);
}
// Implementations for LocalVariableInfoVisitor.
// Marks the name and descriptor constants of a local variable entry.
public void visitLocalVariableInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, LocalVariableInfo localVariableInfo)
{
markConstant(clazz, localVariableInfo.u2nameIndex);
markConstant(clazz, localVariableInfo.u2descriptorIndex);
}
// Implementations for LocalVariableTypeInfoVisitor.
// Marks the name and generic signature constants of a local variable
// type entry.
public void visitLocalVariableTypeInfo(Clazz clazz, Method method, CodeAttribute codeAttribute, LocalVariableTypeInfo localVariableTypeInfo)
{
markConstant(clazz, localVariableTypeInfo.u2nameIndex);
markConstant(clazz, localVariableTypeInfo.u2signatureIndex);
}
// // Implementations for AnnotationVisitor.
//
// public void visitAnnotation(Clazz clazz, Annotation annotation)
// {
// markConstant(clazz, annotation.u2typeIndex);
//
// // Mark the constant pool entries referenced by the element values.
// annotation.elementValuesAccept(clazz, this);
// }
//
//
// // Implementations for ElementValueVisitor.
//
// public void visitConstantElementValue(Clazz clazz, Annotation annotation, ConstantElementValue constantElementValue)
// {
// if (constantElementValue.u2elementNameIndex != 0)
// {
// markConstant(clazz, constantElementValue.u2elementNameIndex);
// }
//
// markConstant(clazz, constantElementValue.u2constantValueIndex);
// }
//
//
// public void visitEnumConstantElementValue(Clazz clazz, Annotation annotation, EnumConstantElementValue enumConstantElementValue)
// {
// if (enumConstantElementValue.u2elementNameIndex != 0)
// {
// markConstant(clazz, enumConstantElementValue.u2elementNameIndex);
// }
//
// markConstant(clazz, enumConstantElementValue.u2typeNameIndex);
// markConstant(clazz, enumConstantElementValue.u2constantNameIndex);
// }
//
//
// public void visitClassElementValue(Clazz clazz, Annotation annotation, ClassElementValue classElementValue)
// {
// if (classElementValue.u2elementNameIndex != 0)
// {
// markConstant(clazz, classElementValue.u2elementNameIndex);
// }
//
// // Mark the referenced class constant pool entry.
// markConstant(clazz, classElementValue.u2classInfoIndex);
// }
//
//
// public void visitAnnotationElementValue(Clazz clazz, Annotation annotation, AnnotationElementValue annotationElementValue)
// {
// if (annotationElementValue.u2elementNameIndex != 0)
// {
// markConstant(clazz, annotationElementValue.u2elementNameIndex);
// }
//
// // Mark the constant pool entries referenced by the annotation.
// annotationElementValue.annotationAccept(clazz, this);
// }
//
//
// public void visitArrayElementValue(Clazz clazz, Annotation annotation, ArrayElementValue arrayElementValue)
// {
// if (arrayElementValue.u2elementNameIndex != 0)
// {
// markConstant(clazz, arrayElementValue.u2elementNameIndex);
// }
//
// // Mark the constant pool entries referenced by the element values.
// arrayElementValue.elementValuesAccept(clazz, annotation, this);
// }
// Implementations for InstructionVisitor.
/** Default visitor method: most instructions reference no constant pool entries. */
public void visitAnyInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, Instruction instruction) {}
/**
 * Marks the constant referenced by this instruction, plus — via the
 * parameterless constructor marker — the default constructor of any class
 * that might be instantiated reflectively through the constant.
 */
public void visitConstantInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, ConstantInstruction constantInstruction)
{
    markConstant(clazz, constantInstruction.constantIndex);

    // Also mark the parameterless constructor of the class, in case the
    // string constant or class constant is being used in a Class.forName
    // or a .class construct.
    clazz.constantPoolEntryAccept(constantInstruction.constantIndex,
                                  parameterlessConstructorMarker);
}
// Small utility methods.
/**
 * Marks the given visitor accepter as being used, by attaching the USED
 * marker object as its visitor info.
 */
protected void markAsUsed(VisitorAccepter visitorAccepter)
{
    visitorAccepter.setVisitorInfo(USED);
}
/**
 * Returns whether the given visitor accepter still needs to be marked as
 * being used, i.e. whether it does not yet carry the USED marker.
 */
protected boolean shouldBeMarkedAsUsed(VisitorAccepter visitorAccepter)
{
    Object visitorInfo = visitorAccepter.getVisitorInfo();
    return USED != visitorInfo;
}
/**
 * Returns whether the given visitor accepter has been marked as being used,
 * i.e. whether it carries the USED marker as its visitor info.
 */
protected boolean isUsed(VisitorAccepter visitorAccepter)
{
    return visitorAccepter.getVisitorInfo() == USED;
}
/**
 * Marks the given visitor accepter as possibly being used, by attaching the
 * POSSIBLY_USED marker object as its visitor info.
 */
protected void markAsPossiblyUsed(VisitorAccepter visitorAccepter)
{
    visitorAccepter.setVisitorInfo(POSSIBLY_USED);
}
/**
 * Returns whether the given visitor accepter still needs to be marked as
 * possibly being used, i.e. it carries neither the USED nor the
 * POSSIBLY_USED marker yet.
 */
protected boolean shouldBeMarkedAsPossiblyUsed(VisitorAccepter visitorAccepter)
{
    // Read the visitor info once and compare it against both markers.
    Object visitorInfo = visitorAccepter.getVisitorInfo();
    return visitorInfo != USED &&
           visitorInfo != POSSIBLY_USED;
}
/**
 * Returns whether the given visitor accepter has been marked as possibly
 * being used (POSSIBLY_USED marker, not the definitive USED marker).
 */
protected boolean isPossiblyUsed(VisitorAccepter visitorAccepter)
{
    return visitorAccepter.getVisitorInfo() == POSSIBLY_USED;
}
/**
 * Clears any usage marks from the given visitor accepter by resetting its
 * visitor info to null.
 */
protected void markAsUnused(VisitorAccepter visitorAccepter)
{
    visitorAccepter.setVisitorInfo(null);
}
/**
 * Marks the given constant pool entry of the given class. This includes
 * visiting any referenced objects, since this marker itself is passed
 * along as the constant visitor.
 */
private void markConstant(Clazz clazz, int index)
{
    clazz.constantPoolEntryAccept(index, this);
}
}
| |
/*
* Copyright (c) 2010-2016 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.model.common.expression.functions;
import java.io.File;
import java.io.IOException;
import java.text.Normalizer;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.naming.InvalidNameException;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.ldap.LdapName;
import javax.naming.ldap.Rdn;
import javax.xml.datatype.XMLGregorianCalendar;
import com.evolveum.midpoint.prism.*;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import com.evolveum.midpoint.model.common.expression.script.ScriptExpression;
import com.evolveum.midpoint.model.common.expression.script.ScriptExpressionEvaluationContext;
import com.evolveum.midpoint.prism.crypto.EncryptionException;
import com.evolveum.midpoint.prism.crypto.Protector;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.polystring.PolyString;
import com.evolveum.midpoint.prism.xml.XmlTypeConverter;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.processor.ResourceAttribute;
import com.evolveum.midpoint.schema.util.ObjectTypeUtil;
import com.evolveum.midpoint.schema.util.ShadowUtil;
import com.evolveum.midpoint.util.DOMUtil;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.exception.SystemException;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.prism.xml.ns._public.types_3.PolyStringType;
import com.evolveum.prism.xml.ns._public.types_3.ProtectedStringType;
/**
* Library of standard midPoint functions. These functions are made available to all
* midPoint expressions.
*
* The functions should be written to support scripting-like comfort. It means that they all needs
* to be null-safe, automatically convert data types as necessary and so on.
*
* @author Radovan Semancik
*
*/
public class BasicExpressionFunctions {
public static final String NAME_SEPARATOR = " ";
public static final Trace LOGGER = TraceManager.getTrace(BasicExpressionFunctions.class);
private static String STRING_PATTERN_WHITESPACE = "\\s+";
private static String STRING_PATTERN_HONORIFIC_PREFIX_ENDS_WITH_DOT = "^(\\S+\\.)$";
private static Pattern PATTERN_NICK_NAME = Pattern.compile("^([^\"]*)\"([^\"]+)\"([^\"]*)$");
private PrismContext prismContext;
private Protector protector;
public BasicExpressionFunctions(PrismContext prismContext, Protector protector) {
super();
this.prismContext = prismContext;
this.protector = protector;
}
/**
 * Converts the string to lower case. Null-safe: null input yields null.
 * Uses the default locale, matching StringUtils.lowerCase semantics.
 */
public static String lc(String orig) {
    return orig == null ? null : orig.toLowerCase();
}
/**
 * Converts the string to upper case. Null-safe: null input yields null.
 * Uses the default locale, matching StringUtils.upperCase semantics.
 */
public static String uc(String orig) {
    return orig == null ? null : orig.toUpperCase();
}
/**
 * Removes whitespace from both ends of the string. Null-safe: null input
 * yields null (same contract as StringUtils.trim).
 */
public static String trim(String orig) {
    return orig == null ? null : orig.trim();
}
/**
 * Concatenates the arguments to create a name.
 * Each argument is stringified and trimmed; empty and null components are
 * skipped, and the remaining pieces are joined with single spaces.
 *
 * @param components name parts in order (may be null or empty)
 * @return the composed name, never null (empty string for no usable parts)
 */
public String concatName(Object... components) {
    if (components == null || components.length == 0) {
        return "";
    }
    StringBuilder sb = new StringBuilder();
    for (Object component : components) {
        if (component == null) {
            continue;
        }
        String text = stringify(component);
        if (text == null) {
            continue;
        }
        String trimmed = trim(text);
        if (trimmed.isEmpty()) {
            continue;
        }
        // Append the separator lazily, only between two kept components.
        if (sb.length() > 0) {
            sb.append(NAME_SEPARATOR);
        }
        sb.append(trimmed);
    }
    return sb.toString();
}
/**
 * Normalize a string value. It follows the default normalization algorithm
 * used for PolyString values.
 *
 * @param orig original value to normalize (may be null)
 * @return normalized value, or null for null input
 */
public String norm(String orig) {
    if (orig == null){
        return null;
    }
    // Wrap in a PolyString and let the configured default normalizer do the work.
    PolyString polyString = new PolyString(orig);
    polyString.recompute(prismContext.getDefaultPolyStringNormalizer());
    return polyString.getNorm();
}

/**
 * Normalize a PolyString value.
 *
 * NOTE: if the norm is not yet computed, it is recomputed in place, i.e.
 * this method mutates the argument.
 *
 * @param orig original value to normalize (may be null)
 * @return normalized value, or null for null input
 */
public String norm(PolyString orig) {
    if (orig == null){
        return null;
    }
    // Reuse the cached norm when it is already available.
    if (orig.getNorm() != null) {
        return orig.getNorm();
    }
    orig.recompute(prismContext.getDefaultPolyStringNormalizer());
    return orig.getNorm();
}

/**
 * Normalize a PolyStringType value.
 *
 * @param orig original value to normalize (may be null)
 * @return normalized value, or null for null input
 */
public String norm(PolyStringType orig) {
    if (orig == null){
        return null;
    }
    // Convert to the prism representation and delegate to the PolyString variant.
    PolyString polyString = orig.toPolyString();
    return norm(polyString);
}
/**
 * Strips diacritical marks from the input: stringifies it, applies Unicode
 * NFKD decomposition and removes all combining marks (\p{M}), e.g. turning
 * "č" into "c".
 *
 * @param input value to convert (null yields null)
 */
public String toAscii(Object input) {
    if (input == null) {
        return null;
    }
    String inputString = stringify(input);
    String decomposed = Normalizer.normalize(inputString, Normalizer.Form.NFKD);
    return decomposed.replaceAll("\\p{M}", "");
}
/**
 * Converts whatever it gets to a string. But it does it in a sensitive way.
 * E.g. it tries to detect collections and returns the first element (if there is only one).
 * Never returns null. Returns empty string instead.
 *
 * @throws IllegalArgumentException if a collection or array with more than
 *         one element is passed
 */
public String stringify(Object whatever) {
    if (whatever == null) {
        return "";
    }
    // Fast paths for the common string-like types.
    if (whatever instanceof String) {
        return (String)whatever;
    }
    if (whatever instanceof PolyString) {
        return ((PolyString)whatever).getOrig();
    }
    if (whatever instanceof PolyStringType) {
        return ((PolyStringType)whatever).getOrig();
    }
    // Unwrap a single-element collection (one level only; a nested collection
    // falls through to toString() below).
    if (whatever instanceof Collection) {
        Collection collection = (Collection)whatever;
        if (collection.isEmpty()) {
            return "";
        }
        if (collection.size() > 1) {
            throw new IllegalArgumentException("Cannot stringify collection because it has "+collection.size()+" values");
        }
        whatever = collection.iterator().next();
    }
    // Unwrap a single-element object array (one level only).
    Class<? extends Object> whateverClass = whatever.getClass();
    if (whateverClass.isArray()) {
        Object[] array = (Object[])whatever;
        if (array.length == 0) {
            return "";
        }
        if (array.length > 1) {
            throw new IllegalArgumentException("Cannot stringify array because it has "+array.length+" values");
        }
        whatever = array[0];
    }
    if (whatever == null) {
        return "";
    }
    // Re-check the string-like types for the unwrapped element.
    if (whatever instanceof String) {
        return (String)whatever;
    }
    if (whatever instanceof PolyString) {
        return ((PolyString)whatever).getOrig();
    }
    if (whatever instanceof PolyStringType) {
        return ((PolyStringType)whatever).getOrig();
    }
    if (whatever instanceof Element) {
        Element element = (Element)whatever;
        Element origElement = DOMUtil.getChildElement(element, PolyString.F_ORIG);
        if (origElement != null) {
            // This is most likely a PolyStringType
            return origElement.getTextContent();
        } else {
            return element.getTextContent();
        }
    }
    if (whatever instanceof Node) {
        return ((Node)whatever).getTextContent();
    }
    // Last resort: plain toString().
    return whatever.toString();
}
/**
 * Collects the OIDs of the given references. For references without an OID
 * the OID of the embedded (resolved) object is used, if any. References
 * yielding no OID are skipped.
 *
 * @param refs references to inspect (null yields null)
 */
public Collection<String> getOids(Collection<ObjectReferenceType> refs){
    if (refs == null){
        return null;
    }
    List<String> result = new ArrayList<>();
    for (ObjectReferenceType ref : refs){
        String oid = ref.getOid();
        if (StringUtils.isBlank(oid) && ref.asReferenceValue().getObject() != null) {
            // Fall back to the OID of the embedded object.
            oid = ref.asReferenceValue().getObject().getOid();
        }
        if (StringUtils.isNotBlank(oid)) {
            result.add(oid);
        }
    }
    return result;
}
/**
 * Returns the OID(s) of a single reference as a collection.
 *
 * @param refs the reference (may be null)
 * @return collection with the OID, an empty collection if the reference has
 *         no usable OID, or null for null input
 */
public Collection<String> getOids(ObjectReferenceType refs){
    if (refs == null) {
        // Previously a null reference was wrapped in a list and caused an NPE
        // in the collection variant; return null consistently with it instead.
        return null;
    }
    return getOids(Collections.singletonList(refs));
}
/**
 * Returns the OID of the given object as a singleton collection.
 *
 * @param refs the object whose OID is taken (may be null)
 * @return singleton list with the OID (which itself may be null for an
 *         unpersisted object), or null for null input
 */
public Collection<String> getOids(ObjectType refs){
    if (refs == null) {
        // Guard against NPE on refs.getOid(); consistent with the other overloads.
        return null;
    }
    List<String> oid = new ArrayList<>();
    oid.add(refs.getOid());
    return oid;
}
/**
 * Returns true if the value is "empty": null, an empty string, an empty
 * collection, or anything whose stringified form is empty.
 */
public boolean isEmpty(Object whatever) {
    if (whatever == null) {
        return true;
    }
    if (whatever instanceof String) {
        return ((String) whatever).isEmpty();
    }
    if (whatever instanceof Collection) {
        return ((Collection) whatever).isEmpty();
    }
    // Everything else: reduce to a string and test that.
    String text = stringify(whatever);
    return text == null || text.isEmpty();
}
/** Returns all values of the given extension property (namespace + local name form). */
public <T> Collection<T> getExtensionPropertyValues(ObjectType object, String namespace, String localPart) {
    return getExtensionPropertyValues(object, new javax.xml.namespace.QName(namespace, localPart));
}

/** Returns all values of the given extension property (groovy QName form). */
public <T> Collection<T> getExtensionPropertyValues(ObjectType object, groovy.xml.QName propertyQname) {
    return getExtensionPropertyValues(object, propertyQname.getNamespaceURI(), propertyQname.getLocalPart());
}

/** Returns all values of the given extension property; never returns null. */
public <T> Collection<T> getExtensionPropertyValues(ObjectType object, javax.xml.namespace.QName propertyQname) {
    return ObjectTypeUtil.getExtensionPropertyValuesNotNull(object, propertyQname);
}

/** Returns the single value of the given extension property (namespace + local name form). */
public <T> T getExtensionPropertyValue(ObjectType object, String namespace, String localPart) throws SchemaException {
    return getExtensionPropertyValue(object, new javax.xml.namespace.QName(namespace, localPart));
}

/** Returns the single value of the given extension reference (namespace + local name form). */
public Referencable getExtensionReferenceValue(ObjectType object, String namespace, String localPart) throws SchemaException {
    return getExtensionReferenceValue(object, new javax.xml.namespace.QName(namespace, localPart));
}

/** Returns the single value of the given extension property (groovy QName form). */
public <T> T getExtensionPropertyValue(ObjectType object, groovy.xml.QName propertyQname) throws SchemaException {
    return getExtensionPropertyValue(object, propertyQname.getNamespaceURI(), propertyQname.getLocalPart());
}

/**
 * Returns the single value of the given extension property, or null for a
 * null object or missing property.
 *
 * @throws SchemaException if the property has more than one value
 */
public <T> T getExtensionPropertyValue(ObjectType object, javax.xml.namespace.QName propertyQname) throws SchemaException {
    if (object == null) {
        return null;
    }
    Collection<T> values = ObjectTypeUtil.getExtensionPropertyValues(object, propertyQname);
    return toSingle(values, "a multi-valued extension property "+propertyQname);
}

/**
 * Returns the single value of the given extension reference, or null for a
 * null object or missing reference.
 *
 * @throws SchemaException if the reference has more than one value
 */
public Referencable getExtensionReferenceValue(ObjectType object, javax.xml.namespace.QName propertyQname) throws SchemaException {
    if (object == null) {
        return null;
    }
    Collection<Referencable> values = ObjectTypeUtil.getExtensionReferenceValues(object, propertyQname);
    return toSingle(values, "a multi-valued extension property "+propertyQname);
}
/**
 * Returns the single value of the object property denoted by the given path.
 *
 * @throws SchemaException if the property has more than one value
 */
public <T> T getPropertyValue(ObjectType object, String path) throws SchemaException {
    Collection<T> values = getPropertyValues(object, path);
    return toSingle(values, "a multi-valued property "+path);
}

/**
 * Returns all values of the object property denoted by the given path.
 * Returns null for a null object and an empty list for a missing property.
 */
public <T> Collection<T> getPropertyValues(ObjectType object, String path) {
    if (object == null) {
        return null;
    }
    // NOTE(review): assumes a script expression evaluation context is present
    // in the thread local (i.e. we are invoked from within a script); this
    // would NPE otherwise — confirm all callers.
    ScriptExpressionEvaluationContext scriptContext = ScriptExpressionEvaluationContext.getThreadLocal();
    ScriptExpression scriptExpression = scriptContext.getScriptExpression();
    ItemPath itemPath = scriptExpression.parsePath(path);
    PrismProperty property = object.asPrismObject().findProperty(itemPath);
    if (property == null) {
        return new ArrayList<T>(0);
    }
    return property.getRealValues();
}
// Shadow attribute accessors. The local-part-only variants assume the
// resource namespace (ri).

/** Returns all values of the shadow attribute (namespace + local name form). */
public <T> Collection<T> getAttributeValues(ShadowType shadow, String attributeNamespace, String attributeLocalPart) {
    return getAttributeValues(shadow, new javax.xml.namespace.QName(attributeNamespace, attributeLocalPart));
}

/** Returns all values of the shadow attribute, assuming the ri namespace. */
public <T> Collection<T> getAttributeValues(ShadowType shadow, String attributeLocalPart) {
    return getAttributeValues(shadow, new javax.xml.namespace.QName(MidPointConstants.NS_RI, attributeLocalPart));
}

/** Returns all values of the shadow attribute (groovy QName form). */
public <T> Collection<T> getAttributeValues(ShadowType shadow, groovy.xml.QName attributeQname) {
    return getAttributeValues(shadow, attributeQname.getNamespaceURI(), attributeQname.getLocalPart());
}

/** Returns all values of the shadow attribute. */
public <T> Collection<T> getAttributeValues(ShadowType shadow, javax.xml.namespace.QName attributeQname) {
    return ShadowUtil.getAttributeValues(shadow, attributeQname);
}

/** Returns the single value of the shadow attribute (namespace + local name form). */
public <T> T getAttributeValue(ShadowType shadow, String attributeNamespace, String attributeLocalPart) throws SchemaException {
    return getAttributeValue(shadow, new javax.xml.namespace.QName(attributeNamespace, attributeLocalPart));
}

/** Returns the single value of the shadow attribute, assuming the ri namespace. */
public <T> T getAttributeValue(ShadowType shadow, String attributeLocalPart) throws SchemaException {
    return getAttributeValue(shadow, new javax.xml.namespace.QName(MidPointConstants.NS_RI, attributeLocalPart));
}

/** Returns the single value of the shadow attribute (groovy QName form). */
public <T> T getAttributeValue(ShadowType shadow, groovy.xml.QName attributeQname) throws SchemaException {
    return getAttributeValue(shadow, attributeQname.getNamespaceURI(), attributeQname.getLocalPart());
}

/** Returns the single value of the shadow attribute. */
public <T> T getAttributeValue(ShadowType shadow, javax.xml.namespace.QName attributeQname) throws SchemaException {
    return ShadowUtil.getAttributeValue(shadow, attributeQname);
}

/** Returns all values of the shadow attribute as strings (namespace + local name form). */
public Collection<String> getAttributeStringValues(ShadowType shadow, String attributeNamespace, String attributeLocalPart) {
    return getAttributeStringValues(shadow, new javax.xml.namespace.QName(attributeNamespace, attributeLocalPart));
}

/** Returns all values of the shadow attribute as strings (groovy QName form). */
public Collection<String> getAttributeStringValues(ShadowType shadow, groovy.xml.QName attributeQname) {
    return getAttributeStringValues(shadow, attributeQname.getNamespaceURI(), attributeQname.getLocalPart());
}

/** Returns all values of the shadow attribute as strings. */
public Collection<String> getAttributeStringValues(ShadowType shadow, javax.xml.namespace.QName attributeQname) {
    return ShadowUtil.getAttributeValues(shadow, attributeQname, String.class);
}
/**
 * Returns the value of the (single) primary identifier of the shadow, or
 * null if the shadow or the identifier value is missing.
 *
 * @throws SchemaException if there is more than one primary identifier or
 *         more than one identifier value
 */
public <T> T getIdentifierValue(ShadowType shadow) throws SchemaException {
    if (shadow == null) {
        return null;
    }
    Collection<ResourceAttribute<?>> identifiers = ShadowUtil.getPrimaryIdentifiers(shadow);
    if (identifiers.isEmpty()) {
        return null;
    }
    if (identifiers.size() > 1) {
        // Fixed message typo ("idenfier" -> "identifier").
        throw new SchemaException("More than one identifier in "+shadow);
    }
    // The attribute type is not statically known here; the caller chooses T.
    Collection<T> realValues = (Collection<T>) identifiers.iterator().next().getRealValues();
    if (realValues.isEmpty()) {
        return null;
    }
    if (realValues.size() > 1) {
        throw new SchemaException("More than one identifier value in "+shadow);
    }
    return realValues.iterator().next();
}
/**
 * Returns the value of the (single) secondary identifier of the shadow, or
 * null if the shadow or the identifier value is missing.
 *
 * @throws SchemaException if there is more than one secondary identifier or
 *         more than one identifier value
 */
public <T> T getSecondaryIdentifierValue(ShadowType shadow) throws SchemaException {
    if (shadow == null) {
        return null;
    }
    Collection<ResourceAttribute<?>> identifiers = ShadowUtil.getSecondaryIdentifiers(shadow);
    if (identifiers.isEmpty()) {
        return null;
    }
    if (identifiers.size() > 1) {
        // Fixed message typo ("idenfier" -> "identifier").
        throw new SchemaException("More than one secondary identifier in "+shadow);
    }
    // The attribute type is not statically known here; the caller chooses T.
    Collection<T> realValues = (Collection<T>) identifiers.iterator().next().getRealValues();
    if (realValues.isEmpty()) {
        return null;
    }
    if (realValues.size() > 1) {
        throw new SchemaException("More than one secondary identifier value in "+shadow);
    }
    return realValues.iterator().next();
}
/**
 * Determines a single attribute value from a (possibly multi-valued) prism
 * property, preferring the value that also occurs in one of the given DNs.
 * NOTE(review): attribute must not be null here — confirm callers.
 */
public String determineLdapSingleAttributeValue(Collection<String> dns, String attributeName, PrismProperty attribute) throws NamingException {
    return determineLdapSingleAttributeValue(dns, attributeName, attribute.getRealValues());
}
/**
 * Returns the value of the given ICF connector configuration property of the
 * resource, matched by fully qualified name. Returns null if the resource,
 * its configuration container or the property is missing.
 */
public <T> T getResourceIcfConfigurationPropertyValue(ResourceType resource, javax.xml.namespace.QName propertyQname) throws SchemaException {
    if (propertyQname == null) {
        return null;
    }
    PrismContainer<?> configurationProperties = getIcfConfigurationProperties(resource);
    if (configurationProperties == null) {
        return null;
    }
    PrismProperty<T> property = configurationProperties.findProperty(propertyQname);
    if (property == null) {
        return null;
    }
    return property.getRealValue();
}

/**
 * Returns the value of the given ICF connector configuration property of the
 * resource, matched by local name only (the namespace is ignored). Returns
 * null if the property is not found.
 */
public <T> T getResourceIcfConfigurationPropertyValue(ResourceType resource, String propertyLocalPart) throws SchemaException {
    if (propertyLocalPart == null) {
        return null;
    }
    PrismContainer<?> configurationProperties = getIcfConfigurationProperties(resource);
    if (configurationProperties == null) {
        return null;
    }
    // Linear scan: the first property with a matching local name wins.
    for (PrismProperty<?> property: configurationProperties.getValue().getProperties()) {
        if (propertyLocalPart.equals(property.getElementName().getLocalPart())) {
            return (T) property.getRealValue();
        }
    }
    return null;
}
/**
 * Returns the ICF configurationProperties container of the resource's
 * connector configuration, or null if the resource or the container is
 * missing.
 */
private PrismContainer<?> getIcfConfigurationProperties(ResourceType resource) {
    if (resource == null) {
        return null;
    }
    PrismContainer<?> connectorConfiguration = resource.asPrismObject().findContainer(ResourceType.F_CONNECTOR_CONFIGURATION);
    if (connectorConfiguration == null) {
        return null;
    }
    return connectorConfiguration.findContainer(SchemaConstants.ICF_CONFIGURATION_PROPERTIES);
}
/**
 * Determines a single attribute value, DN-collection variant. At most one
 * DN may be supplied; the work is delegated to the single-DN variant.
 *
 * @throws IllegalArgumentException if more than one dn is given
 */
public String determineLdapSingleAttributeValue(Collection<String> dns, String attributeName, Collection<String> values) throws NamingException {
    if (values == null || values.isEmpty()) {
        // Shortcut. This is maybe the most common case. We want to return quickly and we also need to avoid more checks later.
        return null;
    }
    if (dns == null || dns.isEmpty()) {
        return determineLdapSingleAttributeValue((String)null, attributeName, values);
    }
    if (dns.size() > 1) {
        // Fixed message: used to read "Nore than one value (N for dn argument specified".
        throw new IllegalArgumentException("More than one value ("+dns.size()+") for dn argument specified");
    }
    return determineLdapSingleAttributeValue(dns.iterator().next(), attributeName, values);
}
// We cannot have Collection<String> here. The generic type information will disappear at runtime and the scripts can pass
// anything that they find suitable. E.g. XPath is passing elements
/**
 * Picks a single value out of a multi-valued LDAP attribute: a value that
 * also appears in the RDNs of the given DN wins; otherwise the
 * alphabetically first value is returned. A single-valued input is returned
 * as-is (no dn needed in that case).
 *
 * @param values attribute values as Strings or DOM Elements
 * @throws NamingException if the dn cannot be parsed
 * @throws IllegalArgumentException on unexpected value types, or when dn is
 *         blank while more than one value is present
 */
public String determineLdapSingleAttributeValue(String dn, String attributeName, Collection<?> values) throws NamingException {
    if (values == null || values.isEmpty()) {
        return null;
    }
    Collection<String> stringValues = null;
    // Determine item type, try to convert to strings
    Object firstElement = values.iterator().next();
    if (firstElement instanceof String) {
        stringValues = (Collection)values;
    } else if (firstElement instanceof Element) {
        stringValues = new ArrayList<String>(values.size());
        for (Object value: values) {
            Element element = (Element)value;
            stringValues.add(element.getTextContent());
        }
    } else {
        throw new IllegalArgumentException("Unexpected value type "+firstElement.getClass());
    }
    if (stringValues.size() == 1) {
        // Single value: nothing to disambiguate.
        return stringValues.iterator().next();
    }
    if (StringUtils.isBlank(dn)) {
        throw new IllegalArgumentException("No dn argument specified, cannot determine which of "+values.size()+" values to use");
    }
    // Walk the RDNs of the DN; a value of the named attribute that also
    // occurs in the value set is considered the authoritative one.
    LdapName parsedDn = new LdapName(dn);
    for (int i=0; i < parsedDn.size(); i++) {
        Rdn rdn = parsedDn.getRdn(i);
        Attributes rdnAttributes = rdn.toAttributes();
        NamingEnumeration<String> rdnIDs = rdnAttributes.getIDs();
        while (rdnIDs.hasMore()) {
            String rdnID = rdnIDs.next();
            Attribute attribute = rdnAttributes.get(rdnID);
            if (attributeName.equals(attribute.getID())) {
                for (int j=0; j < attribute.size(); j++) {
                    Object value = attribute.get(j);
                    if (stringValues.contains(value)) {
                        return (String) value;
                    }
                }
            }
        }
    }
    // Fallback. No values in DN. Just return the first alphabetically-wise value.
    return Collections.min(stringValues);
}
/**
 * Returns the sole element of the collection, or null for null/empty input.
 *
 * @throws SchemaException if the collection holds more than one value
 */
public <T> T toSingle(Collection<T> values) throws SchemaException {
    if (values == null || values.isEmpty()) {
        return null;
    }
    if (values.size() > 1) {
        throw new SchemaException("Attempt to get single value from a multi-valued property");
    }
    return values.iterator().next();
}
/**
 * Returns the sole element of the collection, or null for null/empty input.
 *
 * @param contextDesc description used in the error message
 * @throws SchemaException if the collection holds more than one value
 */
private <T> T toSingle(Collection<T> values, String contextDesc) throws SchemaException {
    if (values == null || values.isEmpty()) {
        return null;
    }
    if (values.size() > 1) {
        throw new SchemaException("Attempt to get single value from " + contextDesc);
    }
    return values.iterator().next();
}
/**
 * Reads the whole file into a string using the platform default charset.
 * Kept for backward compatibility; prefer the charset-aware overload for
 * predictable, platform-independent results.
 */
public static String readFile(String filename) throws IOException {
    return FileUtils.readFileToString(new File(filename));
}

/**
 * Reads the whole file into a string using the given charset (e.g. "UTF-8").
 */
public static String readFile(String filename, String charsetName) throws IOException {
    return FileUtils.readFileToString(new File(filename), charsetName);
}
/**
 * Formats an XMLGregorianCalendar timestamp using a SimpleDateFormat pattern.
 * Returns null if either argument is null.
 *
 * @param format SimpleDateFormat pattern, e.g. "yyyy-MM-dd HH:mm:ss"
 */
public String formatDateTime(String format, XMLGregorianCalendar xmlCal) {
    if (xmlCal == null || format == null) {
        return null;
    }
    // A fresh SimpleDateFormat per call: the class is not thread-safe.
    SimpleDateFormat sdf = new SimpleDateFormat(format);
    Date date = XmlTypeConverter.toDate(xmlCal);
    return sdf.format(date);
}
/**
 * Formats an epoch-millisecond timestamp using a SimpleDateFormat pattern.
 * Returns null if either argument is null.
 */
public String formatDateTime(String format, Long millis) {
    if (format == null || millis == null) {
        return null;
    }
    // A fresh SimpleDateFormat per call: the class is not thread-safe.
    return new SimpleDateFormat(format).format(new Date(millis));
}
/**
 * Parses a date/time string according to the given SimpleDateFormat pattern
 * and converts it to an XMLGregorianCalendar. Returns null if either
 * argument is null.
 *
 * @throws ParseException if the string does not match the pattern
 */
public XMLGregorianCalendar parseDateTime(String format, String stringDate) throws ParseException {
    if (format == null || stringDate == null) {
        return null;
    }
    String[] formats = new String[]{format};
    Date date = DateUtils.parseDate(stringDate, formats);
    // Defensive: DateUtils.parseDate is expected to throw rather than
    // return null, but guard anyway.
    if (date == null) {
        return null;
    }
    return XmlTypeConverter.createXMLGregorianCalendar(date);
}
/** Returns the current system time as an XMLGregorianCalendar. */
public XMLGregorianCalendar currentDateTime() {
    return XmlTypeConverter.createXMLGregorianCalendar(System.currentTimeMillis());
}
/**
 * Splits a full name into its components: honorific prefix (leading words
 * ending with a dot), given/additional/family name, nick name (a part in
 * double quotes anywhere in the string) and honorific suffix (words after
 * a comma). Returns null for blank input.
 */
private ParsedFullName parseFullName(String fullName) {
    if (StringUtils.isBlank(fullName)) {
        return null;
    }
    String root = fullName.trim();
    ParsedFullName p = new ParsedFullName();

    // LOGGER.trace("(1) root=", root);

    // Extract the quoted nick name first and remove it from the working string.
    Matcher m = PATTERN_NICK_NAME.matcher(root);
    if (m.matches()) {
        String nickName = m.group(2).trim();
        p.setNickName(nickName);
        root = m.group(1) + " " + m.group(3);
        // LOGGER.trace("nick={}, root={}", nickName, root);
    }

    String[] words = root.split(STRING_PATTERN_WHITESPACE);
    int i = 0;

    // LOGGER.trace("(2) i={}, words={}", i, Arrays.toString(words));

    // Collect leading dot-terminated words as the honorific prefix (e.g. "Ing. Mgr.").
    StringBuilder honorificPrefixBuilder = new StringBuilder();
    while (i < words.length && words[i].matches(STRING_PATTERN_HONORIFIC_PREFIX_ENDS_WITH_DOT)) {
        honorificPrefixBuilder.append(words[i]);
        honorificPrefixBuilder.append(" ");
        i++;
    }
    if (honorificPrefixBuilder.length() > 0) {
        honorificPrefixBuilder.setLength(honorificPrefixBuilder.length() - 1);
        p.setHonorificPrefix(honorificPrefixBuilder.toString());
    }

    // LOGGER.trace("(3) i={}, words={}", i, Arrays.toString(words));

    // Collect the name words up to (and including) the word ending with a comma.
    List<String> rootNameWords = new ArrayList<>();
    while (i < words.length && !words[i].endsWith(",")) {
        rootNameWords.add(words[i]);
        i++;
    }

    if (i < words.length && words[i].endsWith(",")) {
        String word = words[i];
        i++;
        if (!word.equals(",")) {
            // Strip the trailing comma and keep the word as part of the name.
            word = word.substring(0, word.length() - 1);
            rootNameWords.add(word);
        }
    }

    // LOGGER.trace("(4) i={}, words={}", i, Arrays.toString(words));
    // LOGGER.trace("(4) rootNameWords={}", rootNameWords);

    // First word is the given name, last word the family name, everything
    // in between is the additional name. A single word is the family name.
    if (rootNameWords.size() > 1) {
        p.setFamilyName(rootNameWords.get(rootNameWords.size() - 1));
        rootNameWords.remove(rootNameWords.size() - 1);
        p.setGivenName(rootNameWords.get(0));
        rootNameWords.remove(0);
        p.setAdditionalName(StringUtils.join(rootNameWords, " "));
    } else if (rootNameWords.size() == 1) {
        p.setFamilyName(rootNameWords.get(0));
    }

    // Everything after the comma is the honorific suffix (e.g. "PhD.").
    StringBuilder honorificSuffixBuilder = new StringBuilder();
    while (i < words.length) {
        honorificSuffixBuilder.append(words[i]);
        honorificSuffixBuilder.append(" ");
        i++;
    }
    if (honorificSuffixBuilder.length() > 0) {
        honorificSuffixBuilder.setLength(honorificSuffixBuilder.length() - 1);
        p.setHonorificSuffix(honorificSuffixBuilder.toString());
    }

    LOGGER.trace("Parsed full name '{}' as {}", fullName, p);

    return p;
}
/** Extracts the given (first) name from a full name, or null. */
public String parseGivenName(Object fullName) {
    ParsedFullName p = parseFullName(stringify(fullName));
    return p == null ? null : p.getGivenName();
}

/** Extracts the family (last) name from a full name, or null. */
public String parseFamilyName(Object fullName) {
    ParsedFullName p = parseFullName(stringify(fullName));
    return p == null ? null : p.getFamilyName();
}

/** Extracts the additional (middle) name from a full name, or null. */
public String parseAdditionalName(Object fullName) {
    ParsedFullName p = parseFullName(stringify(fullName));
    return p == null ? null : p.getAdditionalName();
}

/** Extracts the quoted nick name from a full name, or null. */
public String parseNickName(Object fullName) {
    ParsedFullName p = parseFullName(stringify(fullName));
    return p == null ? null : p.getNickName();
}

/** Extracts the honorific prefix (e.g. titles before the name), or null. */
public String parseHonorificPrefix(Object fullName) {
    ParsedFullName p = parseFullName(stringify(fullName));
    return p == null ? null : p.getHonorificPrefix();
}

/** Extracts the honorific suffix (e.g. titles after a comma), or null. */
public String parseHonorificSuffix(Object fullName) {
    ParsedFullName p = parseFullName(stringify(fullName));
    return p == null ? null : p.getHonorificSuffix();
}
/**
 * Decrypts the protected string into cleartext.
 *
 * @throws SystemException wrapping any EncryptionException
 */
public String decrypt(ProtectedStringType protectedString) {
    try {
        return protector.decryptString(protectedString);
    } catch (EncryptionException e) {
        throw new SystemException(e.getMessage(), e);
    }
}

/**
 * Encrypts the cleartext string into a protected string.
 *
 * @throws SystemException wrapping any EncryptionException
 */
public ProtectedStringType encrypt(String string) {
    try {
        return protector.encryptString(string);
    } catch (EncryptionException e) {
        throw new SystemException(e.getMessage(), e);
    }
}
/**
 * Creates a valid LDAP distinguished name from the wide range of components. The method
 * can be invoked in many ways, e.g.:
 *
 * composeDn("cn","foo","o","bar")
 * composeDn("cn","foo",new Rdn("o","bar"))
 * composeDn(new Rdn("cn","foo"),"ou","baz",new Rdn("o","bar"))
 * composeDn(new Rdn("cn","foo"),"ou","baz","o","bar")
 * composeDn(new Rdn("cn","foo"),new LdapName("ou=baz,o=bar"))
 * composeDn("cn","foo",new LdapName("ou=baz,o=bar"))
 *
 * Note: the DN is not normalized. The case of the attribute names and white spaces are
 * preserved.
 *
 * @throws InvalidNameException on malformed input, including an attribute
 *         name that is not followed by a value
 */
public static String composeDn(Object... components) throws InvalidNameException {
    if (components == null || components.length == 0) {
        return null;
    }
    if (components.length == 1 && components[0] == null) {
        return null;
    }
    if (components.length == 1 && (components[0] instanceof String) && StringUtils.isBlank((String)(components[0]))) {
        return null;
    }
    // Components arrive "most significant first", but LdapName wants the
    // least significant RDN first — hence addFirst/addAll(0, ...).
    LinkedList<Rdn> rdns = new LinkedList<>();
    String attrName = null; // pending attribute name waiting for its value
    for (Object component: components) {
        if (attrName != null && !(component instanceof String || component instanceof PolyString || component instanceof PolyStringType)) {
            throw new InvalidNameException("Invalid input to composeDn() function: expected string after '"+attrName+"' argument, but got "+component.getClass());
        }
        if (component instanceof Rdn) {
            rdns.addFirst((Rdn)component);
        } else if (component instanceof PolyString) {
            component = ((PolyString)component).toString();
        } else if (component instanceof PolyStringType) {
            component = ((PolyStringType)component).toString();
        }
        if (component instanceof String) {
            if (attrName == null) {
                attrName = (String)component;
            } else {
                rdns.addFirst(new Rdn(attrName, (String)component));
                attrName = null;
            }
        }
        if (component instanceof LdapName) {
            rdns.addAll(0,((LdapName)component).getRdns());
        }
    }
    if (attrName != null) {
        // A dangling attribute name was previously dropped silently,
        // yielding a truncated DN; fail explicitly instead.
        throw new InvalidNameException("Invalid input to composeDn() function: attribute name '"+attrName+"' has no value");
    }
    LdapName dn = new LdapName(rdns);
    return dn.toString();
}
/**
* Creates a valid LDAP distinguished name from the wide range of components assuming that
* the last component is a suffix. The method can be invoked in many ways, e.g.:
*
* composeDn("cn","foo","o=bar")
* composeDn(new Rdn("cn","foo"),"ou=baz,o=bar")
* composeDn(new Rdn("cn","foo"),new LdapName("ou=baz,o=bar"))
* composeDn("cn","foo",new LdapName("ou=baz,o=bar"))
*
* The last element is a complete suffix represented either as String or LdapName.
*
* Note: the DN is not normalized. The case of the attribute names and white spaces are
* preserved.
*/
public static String composeDnWithSuffix(Object... components) throws InvalidNameException {
    if (components == null || components.length == 0) {
        return null;
    }
    if (components.length == 1) {
        // A single argument must be the suffix itself: a String or an LdapName.
        if (components[0] == null) {
            return null;
        }
        if (components[0] instanceof String) {
            if (StringUtils.isBlank((String) components[0])) {
                return null;
            }
            // Parse the textual suffix so an invalid DN fails here, consistently
            // with the multi-argument path.
            return new LdapName((String) components[0]).toString();
        }
        if (components[0] instanceof LdapName) {
            return ((LdapName) components[0]).toString();
        }
        // Bug fix: the message used to blame composeDn() instead of this method.
        throw new InvalidNameException("Invalid input to composeDnWithSuffix() function: expected suffix (last element) to be String or LdapName, but it was " + components[0].getClass());
    }
    Object suffix = components[components.length - 1];
    if (suffix instanceof String) {
        suffix = new LdapName((String) suffix);
    }
    if (!(suffix instanceof LdapName)) {
        throw new InvalidNameException("Invalid input to composeDnWithSuffix() function: expected suffix (last element) to be String or LdapName, but it was " + suffix.getClass());
    }
    // Work on a copy so the caller's array is not mutated (varargs may be an
    // explicitly passed, reused Object[]).
    Object[] normalized = components.clone();
    normalized[normalized.length - 1] = suffix;
    return composeDn(normalized);
}
}
| |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.transforms;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.CoderRegistry;
import com.google.cloud.dataflow.sdk.coders.ListCoder;
import com.google.cloud.dataflow.sdk.runners.DirectPipelineRunner;
import com.google.cloud.dataflow.sdk.transforms.Combine.CombineFn;
import com.google.cloud.dataflow.sdk.util.PCollectionViews;
import com.google.cloud.dataflow.sdk.util.WindowedValue;
import com.google.cloud.dataflow.sdk.values.KV;
import com.google.cloud.dataflow.sdk.values.PCollection;
import com.google.cloud.dataflow.sdk.values.PCollectionView;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* Transforms for creating {@link PCollectionView}s from {@link PCollection}s,
* for consuming the contents of those {@link PCollection}s as side inputs
* to {@link ParDo} transforms. These transforms support viewing a {@link PCollection}
* as a single value, an iterable, a map, or a multimap.
*
* <p>For a {@link PCollection} that contains a single value of type {@code T}
* per window, such as the output of {@link Combine#globally},
* use {@link View#asSingleton()} to prepare it for use as a side input:
*
* <pre>
* {@code
* PCollectionView<T> output = someOtherPCollection
* .apply(Combine.globally(...))
* .apply(View.asSingleton());
* }
* </pre>
*
* <p>For a small {@link PCollection} that can fit entirely in memory,
* use {@link View#asList()} to prepare it for use as a {@code List}.
* When read as a side input, the entire list will be cached in memory.
*
* <pre>
* {@code
* PCollectionView<List<T>> output =
* smallPCollection.apply(View.asList());
* }
* </pre>
*
* <p>If a {@link PCollection} of {@code KV<K, V>} is known to
* have a single value for each key, then use {@link View#asMap()}
* to view it as a {@code Map<K, V>}:
*
* <pre>
* {@code
 * PCollectionView<Map<K, V>> output =
* somePCollection.apply(View.asMap());
* }
* </pre>
*
* <p>Otherwise, to access a {@link PCollection} of {@code KV<K, V>} as a
* {@code Map<K, Iterable<V>>} side input, use {@link View#asMultimap()}:
*
* <pre>
* {@code
 * PCollectionView<Map<K, Iterable<V>>> output =
 *     somePCollection.apply(View.asMultimap());
* }
* </pre>
*
* <p>To iterate over an entire window of a {@link PCollection} via
* side input, use {@link View#asIterable()}:
*
* <pre>
* {@code
* PCollectionView<Iterable<T>> output =
* somePCollection.apply(View.asIterable());
* }
* </pre>
*
*
* <p>Both {@link View#asMultimap()} and {@link View#asMap()} are useful
* for implementing lookup based "joins" with the main input, when the
* side input is small enough to fit into memory.
*
* <p>For example, if you represent a page on a website via some {@code Page} object and
* have some type {@code UrlVisits} logging that a URL was visited, you could convert these
* to more fully structured {@code PageVisit} objects using a side input, something like the
* following:
*
* <pre>
* {@code
* PCollection<Page> pages = ... // pages fit into memory
* PCollection<UrlVisit> urlVisits = ... // very large collection
 * final PCollectionView<Map<URL, Page>> urlToPage = pages
 *     .apply(WithKeys.of( ... )) // extract the URL from the page
 *     .apply(View.asMap());
*
* PCollection PageVisits = urlVisits
* .apply(ParDo.withSideInputs(urlToPage)
* .of(new DoFn<UrlVisit, PageVisit>() {
* {@literal @}Override
* void processElement(ProcessContext context) {
* UrlVisit urlVisit = context.element();
* Page page = urlToPage.get(urlVisit.getUrl());
 *         context.output(new PageVisit(page, urlVisit.getVisitData()));
* }
* }));
* }
* </pre>
*
* <p>See {@link ParDo#withSideInputs} for details on how to access
* this variable inside a {@link ParDo} over another {@link PCollection}.
*/
public class View {
  // Utility class: only static factory methods, never instantiated.
  private View() { }

  /**
   * Returns a {@link AsSingleton} transform that takes a singleton
   * {@link PCollection} as input and produces a {@link PCollectionView}
   * of the single value, to be consumed as a side input.
   *
   * <pre>
   * {@code
   * PCollection<InputT> input = ...
   * CombineFn<InputT, OutputT> yourCombineFn = ...
   * PCollectionView<OutputT> output = input
   *     .apply(Combine.globally(yourCombineFn))
   *     .apply(View.asSingleton());
   * }</pre>
   *
   * <p>If the input {@link PCollection} is empty,
   * throws {@link java.util.NoSuchElementException} in the consuming
   * {@link DoFn}.
   *
   * <p>If the input {@link PCollection} contains more than one
   * element, throws {@link IllegalArgumentException} in the
   * consuming {@link DoFn}.
   */
  public static <T> AsSingleton<T> asSingleton() {
    return new AsSingleton<>();
  }

  /**
   * Returns a transform that takes a {@link PCollection} and returns a
   * {@code List} containing all of its elements, to be consumed as
   * a side input.
   *
   * <p>The resulting list is required to fit in memory.
   */
  public static <T> PTransform<PCollection<T>, PCollectionView<List<T>>> asList() {
    return new AsList<>();
  }

  /**
   * Returns a {@link AsIterable} that takes a
   * {@link PCollection} as input and produces a {@link PCollectionView}
   * of the values, to be consumed as an iterable side input. The values of
   * this {@code Iterable} may not be cached; if that behavior is desired, use
   * {@link #asList}.
   */
  public static <T> AsIterable<T> asIterable() {
    return new AsIterable<>();
  }

  /**
   * Returns an {@link AsMap} transform that takes a {@link PCollection} as input
   * and produces a {@link PCollectionView} of the values to be consumed
   * as a {@code Map<K, V>} side input. It is required that each key of the input be
   * associated with a single value. If this is not the case, precede this
   * view with {@code Combine.perKey}, as below, or alternatively use {@link View#asMultimap()}.
   *
   * <pre>
   * {@code
   * PCollection<KV<K, V>> input = ...
   * CombineFn<V, OutputT> yourCombineFn = ...
   * PCollectionView<Map<K, OutputT>> output = input
   *     .apply(Combine.perKey(yourCombineFn.<K>asKeyedFn()))
   *     .apply(View.asMap());
   * }</pre>
   *
   * <p>Currently, the resulting map is required to fit into memory.
   */
  public static <K, V> AsMap<K, V> asMap() {
    return new AsMap<K, V>();
  }

  /**
   * Returns an {@link AsMultimap} transform that takes a {@link PCollection}
   * of {@code KV<K, V>} pairs as input and produces a {@link PCollectionView} of
   * its contents as a {@code Map<K, Iterable<V>>} for use as a side input.
   * In contrast to {@link View#asMap()}, it is not required that the keys in the
   * input collection be unique.
   *
   * <pre>
   * {@code
   * PCollection<KV<K, V>> input = ... // maybe more than one occurrence of some keys
   * PCollectionView<Map<K, Iterable<V>>> output = input.apply(View.asMultimap());
   * }</pre>
   *
   * <p>Currently, the resulting map is required to fit into memory.
   */
  public static <K, V> AsMultimap<K, V> asMultimap() {
    return new AsMultimap<K, V>();
  }

  /**
   * A {@link PTransform} that produces a {@link PCollectionView} of a
   * {@link PCollection}, yielding all of its elements as a {@code List}.
   *
   * <p>Instantiate via {@link View#asList}.
   */
  public static class AsList<T> extends PTransform<PCollection<T>, PCollectionView<List<T>>> {
    private AsList() { }
    @Override
    public void validate(PCollection<T> input) {
      try {
        // Side inputs are materialized per window, so the input must be
        // groupable by window (same precondition as GroupByKey).
        GroupByKey.applicableTo(input);
      } catch (IllegalStateException e) {
        throw new IllegalStateException("Unable to create a side-input view from input", e);
      }
    }
    @Override
    public PCollectionView<List<T>> apply(PCollection<T> input) {
      // Concatenate all elements into a single in-memory List per window.
      return input.apply(Combine.globally(new Concatenate<T>()).asSingletonView());
    }
  }

  /**
   * A {@link PTransform} that produces a {@link PCollectionView} of a
   * {@link PCollection}, yielding its elements as an {@code Iterable}.
   *
   * <p>Instantiate via {@link View#asIterable}.
   */
  public static class AsIterable<T>
      extends PTransform<PCollection<T>, PCollectionView<Iterable<T>>> {
    private AsIterable() { }
    @Override
    public void validate(PCollection<T> input) {
      try {
        // Same windowing precondition as GroupByKey; see AsList.validate.
        GroupByKey.applicableTo(input);
      } catch (IllegalStateException e) {
        throw new IllegalStateException("Unable to create a side-input view from input", e);
      }
    }
    @Override
    public PCollectionView<Iterable<T>> apply(PCollection<T> input) {
      return input.apply(CreatePCollectionView.<T, Iterable<T>>of(PCollectionViews.iterableView(
          input.getPipeline(), input.getWindowingStrategy(), input.getCoder())));
    }
  }

  /**
   * A {@link PTransform} that produces a {@link PCollectionView} of a singleton
   * {@link PCollection} yielding the single element it contains, optionally
   * substituting a default value for empty windows.
   *
   * <p>Instantiate via {@link View#asSingleton}.
   */
  public static class AsSingleton<T> extends PTransform<PCollection<T>, PCollectionView<T>> {
    // Value to return for windows with no element; only meaningful when hasDefault is true.
    private final T defaultValue;
    // Whether a default value was supplied via withDefaultValue().
    private final boolean hasDefault;
    private AsSingleton() {
      this.defaultValue = null;
      this.hasDefault = false;
    }
    private AsSingleton(T defaultValue) {
      this.defaultValue = defaultValue;
      this.hasDefault = true;
    }
    /**
     * Returns whether this transform has a default value.
     */
    public boolean hasDefaultValue() {
      return hasDefault;
    }
    /**
     * Returns the default value of this transform, or null if there isn't one.
     */
    public T defaultValue() {
      return defaultValue;
    }
    /**
     * Returns a new {@link AsSingleton} that uses the given default value
     * for windows with no value in them.
     */
    public AsSingleton<T> withDefaultValue(T defaultValue) {
      return new AsSingleton<>(defaultValue);
    }
    @Override
    public void validate(PCollection<T> input) {
      try {
        // Same windowing precondition as GroupByKey; see AsList.validate.
        GroupByKey.applicableTo(input);
      } catch (IllegalStateException e) {
        throw new IllegalStateException("Unable to create a side-input view from input", e);
      }
    }
    @Override
    public PCollectionView<T> apply(PCollection<T> input) {
      return input.apply(CreatePCollectionView.<T, T>of(PCollectionViews.singletonView(
          input.getPipeline(),
          input.getWindowingStrategy(),
          hasDefault,
          defaultValue,
          input.getCoder())));
    }
  }

  /**
   * A {@link PTransform} that produces a {@link PCollectionView} of a keyed {@link PCollection}
   * yielding a map of keys to all associated values.
   *
   * <p>Instantiate via {@link View#asMultimap}.
   */
  public static class AsMultimap<K, V>
      extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, Iterable<V>>>> {
    private AsMultimap() { }
    @Override
    public void validate(PCollection<KV<K, V>> input) {
      try {
        // Same windowing precondition as GroupByKey; see AsList.validate.
        GroupByKey.applicableTo(input);
      } catch (IllegalStateException e) {
        throw new IllegalStateException("Unable to create a side-input view from input", e);
      }
    }
    @Override
    public PCollectionView<Map<K, Iterable<V>>> apply(PCollection<KV<K, V>> input) {
      return input.apply(CreatePCollectionView.<KV<K, V>, Map<K, Iterable<V>>>of(
          PCollectionViews.multimapView(
              input.getPipeline(),
              input.getWindowingStrategy(),
              input.getCoder())));
    }
  }

  /**
   * A {@link PTransform} that produces a {@link PCollectionView} of a keyed {@link PCollection}
   * yielding a map from each key to its unique associated value. When converting
   * a {@link PCollection} that has more than one value per key, precede this transform with a
   * {@code Combine.perKey}:
   *
   * <pre>
   * {@code
   * PCollectionView<Map<K, OutputT>> input
   *     .apply(Combine.perKey(myCombineFunction))
   *     .apply(View.asMap());
   * }</pre>
   *
   * <p>Instantiate via {@link View#asMap}.
   */
  public static class AsMap<K, V>
      extends PTransform<PCollection<KV<K, V>>, PCollectionView<Map<K, V>>> {
    private AsMap() { }
    /**
     * @deprecated this method simply returns this AsMap unmodified
     */
    @Deprecated()
    public AsMap<K, V> withSingletonValues() {
      return this;
    }
    @Override
    public void validate(PCollection<KV<K, V>> input) {
      try {
        // Same windowing precondition as GroupByKey; see AsList.validate.
        GroupByKey.applicableTo(input);
      } catch (IllegalStateException e) {
        throw new IllegalStateException("Unable to create a side-input view from input", e);
      }
    }
    @Override
    public PCollectionView<Map<K, V>> apply(PCollection<KV<K, V>> input) {
      return input.apply(CreatePCollectionView.<KV<K, V>, Map<K, V>>of(
          PCollectionViews.mapView(
              input.getPipeline(),
              input.getWindowingStrategy(),
              input.getCoder())));
    }
  }

  ////////////////////////////////////////////////////////////////////////////
  // Internal details below

  /**
   * Creates a primitive {@link PCollectionView}.
   *
   * <p>For internal use only by runner implementors.
   *
   * @param <ElemT> The type of the elements of the input PCollection
   * @param <ViewT> The type associated with the {@link PCollectionView} used as a side input
   */
  public static class CreatePCollectionView<ElemT, ViewT>
      extends PTransform<PCollection<ElemT>, PCollectionView<ViewT>> {
    // The pre-built view returned verbatim by apply(); the runner is
    // responsible for materializing the input into it.
    private PCollectionView<ViewT> view;
    private CreatePCollectionView(PCollectionView<ViewT> view) {
      this.view = view;
    }
    public static <ElemT, ViewT> CreatePCollectionView<ElemT, ViewT> of(
        PCollectionView<ViewT> view) {
      return new CreatePCollectionView<>(view);
    }
    @Override
    public PCollectionView<ViewT> apply(PCollection<ElemT> input) {
      return view;
    }
    static {
      // Register the default evaluator for the direct runner: it captures the
      // input's windowed values and stores them as the contents of the view.
      DirectPipelineRunner.registerDefaultTransformEvaluator(
          CreatePCollectionView.class,
          new DirectPipelineRunner.TransformEvaluator<CreatePCollectionView>() {
            @SuppressWarnings("rawtypes")
            @Override
            public void evaluate(
                CreatePCollectionView transform,
                DirectPipelineRunner.EvaluationContext context) {
              evaluateTyped(transform, context);
            }
            // Helper that re-introduces the type parameters erased by the raw
            // evaluate() signature above.
            private <ElemT, ViewT> void evaluateTyped(
                CreatePCollectionView<ElemT, ViewT> transform,
                DirectPipelineRunner.EvaluationContext context) {
              List<WindowedValue<ElemT>> elems =
                  context.getPCollectionWindowedValues(context.getInput(transform));
              context.setPCollectionView(context.getOutput(transform), elems);
            }
          });
    }
  }

  /**
   * Combiner that combines {@code T}s into a single {@code List<T>} containing
   * all inputs.
   *
   * <p>For internal use only by {@link View#asList()}, which views a tiny {@link PCollection}
   * that fits in memory as a single {@code List}. For a large {@link PCollection} this is
   * expected to crash!
   *
   * @param <T> the type of elements to concatenate.
   */
  public static class Concatenate<T> extends CombineFn<T, List<T>, List<T>> {
    @Override
    public List<T> createAccumulator() {
      return new ArrayList<T>();
    }
    @Override
    public List<T> addInput(List<T> accumulator, T input) {
      accumulator.add(input);
      return accumulator;
    }
    @Override
    public List<T> mergeAccumulators(Iterable<List<T>> accumulators) {
      List<T> result = createAccumulator();
      for (List<T> accumulator : accumulators) {
        result.addAll(accumulator);
      }
      return result;
    }
    @Override
    public List<T> extractOutput(List<T> accumulator) {
      return accumulator;
    }
    @Override
    public Coder<List<T>> getAccumulatorCoder(CoderRegistry registry, Coder<T> inputCoder) {
      return ListCoder.of(inputCoder);
    }
    @Override
    public Coder<List<T>> getDefaultOutputCoder(CoderRegistry registry, Coder<T> inputCoder) {
      return ListCoder.of(inputCoder);
    }
  }
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.diff.impl.mergeTool;
import com.intellij.ide.util.PsiNavigationSupport;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diff.DiffContent;
import com.intellij.openapi.diff.MergeRequest;
import com.intellij.openapi.diff.SimpleContent;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.Navigatable;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
/**
 * Legacy three-way merge request holding the left, base (center, writable) and
 * right contents of a merge. The base slot may be a {@link MergeContent}, whose
 * working document receives the merge result and is applied back to the target
 * when the dialog is accepted.
 */
@Deprecated
public class MergeRequestImpl extends MergeRequest {
  private static final Logger LOG = Logger.getInstance(MergeRequestImpl.class);
  // Fixed layout: [0] = left content, [1] = base/result content, [2] = right content.
  private final DiffContent[] myDiffContents = new DiffContent[3];
  private @Nls String myWindowTitle = null;
  // Titles shown above the three panes; null until setVersionTitles() is called.
  private String[] myVersionTitles = null;
  // Dialog exit code; defaults to CANCEL until setResult() is invoked.
  private int myResult = DialogWrapper.CANCEL_EXIT_CODE;
  private String myHelpId;

  /** Creates a request with plain-text sides and a {@link MergeContent} base backed by {@code base}. */
  public MergeRequestImpl(@NotNull String left,
                          @NotNull MergeVersion base,
                          @NotNull String right,
                          @Nullable Project project) {
    this(new SimpleContent(left),
         new MergeContent(base, project),
         new SimpleContent(right),
         project);
  }

  /** Creates a request whose three sides are all plain strings of the given file type. */
  public MergeRequestImpl(@NotNull String left,
                          @NotNull String base,
                          @NotNull String right,
                          @Nullable FileType type,
                          @Nullable Project project) {
    this(new SimpleContent(left, type),
         new SimpleContent(base, type),
         new SimpleContent(right, type),
         project);
  }

  private MergeRequestImpl(@NotNull DiffContent left,
                           @NotNull DiffContent base,
                           @NotNull DiffContent right,
                           @Nullable Project project) {
    super(project);
    myDiffContents[0] = left;
    myDiffContents[1] = base;
    myDiffContents[2] = right;
    if (LOG.isDebugEnabled()) {
      // Throwable captures the construction stack trace for debugging
      // read-only/invalid base-content problems.
      VirtualFile file = base.getFile();
      LOG.debug(new Throwable(base.getClass() + " - writable: " + base.getDocument().isWritable() + ", contentType: " +
                              base.getContentType() + ", file: " + (file != null ? "valid - " + file.isValid() : "null")));
    }
  }

  /** Returns the three contents in [left, base, right] order. */
  @Override
  public DiffContent @NotNull [] getContents() {
    return myDiffContents;
  }

  @Override
  public String[] getContentTitles() {
    return myVersionTitles;
  }

  @Override
  public void setVersionTitles(String[] versionTitles) {
    myVersionTitles = versionTitles;
  }

  @Nls
  @Override
  public String getWindowTitle() {
    return myWindowTitle;
  }

  @Override
  public void setWindowTitle(@Nls String windowTitle) {
    myWindowTitle = windowTitle;
  }

  /** Records the dialog exit code; on OK the merged text is applied to the target. */
  public void setResult(int result) {
    if (result == DialogWrapper.OK_EXIT_CODE) applyChanges();
    myResult = result;
  }

  /** Applies the working document's text back to the merge target, if the base is a MergeContent. */
  public void applyChanges() {
    MergeContent mergeContent = getMergeContent();
    if (mergeContent != null) {
      mergeContent.applyChanges();
    }
  }

  @Override
  public int getResult() {
    return myResult;
  }

  /** Returns the base content as a {@link MergeContent}, or null if it is a plain content. */
  @Nullable
  public MergeContent getMergeContent() {
    if (myDiffContents[1] instanceof MergeContent) {
      return (MergeContent)myDiffContents[1];
    }
    return null;
  }

  @Nullable
  public DiffContent getResultContent() {
    return getMergeContent();
  }

  @Override
  public void restoreOriginalContent() {
    final MergeContent mergeContent = getMergeContent();
    if (mergeContent == null) return;
    mergeContent.restoreOriginalContent();
  }

  public String getHelpId() {
    return myHelpId;
  }

  @Override
  public void setHelpId(@Nullable @NonNls String helpId) {
    myHelpId = helpId;
  }

  /**
   * The editable center content of the merge: wraps a {@link MergeVersion} target
   * in a writable working document that accumulates the merge result.
   */
  public static class MergeContent extends DiffContent {
    @NotNull private final MergeVersion myTarget;
    // Writable copy of the target's document; edited during the merge.
    private final Document myWorkingDocument;
    private final Project myProject;

    public MergeContent(@NotNull MergeVersion target, Project project) {
      myTarget = target;
      myProject = project;
      myWorkingDocument = myTarget.createWorkingDocument(project);
      LOG.assertTrue(myWorkingDocument.isWritable());
    }

    /** Writes the working document's current text back into the merge target. */
    public void applyChanges() {
      myTarget.applyText(myWorkingDocument.getText(), myProject);
    }

    @Override
    public Document getDocument() {
      return myWorkingDocument;
    }

    @Override
    public Navigatable getOpenFileDescriptor(int offset) {
      VirtualFile file = getFile();
      if (file == null) return null;
      return PsiNavigationSupport.getInstance().createNavigatable(myProject, file, offset);
    }

    @Override
    public VirtualFile getFile() {
      return myTarget.getFile();
    }

    @Override
    @Nullable
    public FileType getContentType() {
      return myTarget.getContentType();
    }

    @Override
    public byte[] getBytes() throws IOException {
      return myTarget.getBytes();
    }

    /** Discards the merge result and restores the target's original content. */
    public void restoreOriginalContent() {
      myTarget.restoreOriginalContent(myProject);
    }

    @NotNull
    public MergeVersion getMergeVersion() {
      return myTarget;
    }
  }
}
| |
/*
* Copyright 2015-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.core.dsl;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Parser for stream DSL that generates {@link StreamNode}.
*
* @author Andy Clement
* @author Patrick Peralta
* @author Ilayaperumal Gopinathan
* @author Mark Fisher
*/
public class StreamParser extends ModuleParser {
/**
* Stream name (may be {@code null}).
*/
private final String name;
/**
* Stream DSL text.
*/
private final String dsl;
/**
* Construct a {@code StreamParser} without supplying the stream name up front.
* The stream name may be embedded in the definition; for example:
* {@code mystream = http | file}.
*
* @param dsl the stream definition DSL text
*/
public StreamParser(String dsl) {
this(null, dsl);
}
/**
* Construct a {@code StreamParser} for a stream with the provided name.
*
* @param name stream name
* @param dsl stream dsl text
*/
public StreamParser(String name, String dsl) {
super(new Tokens(dsl));
this.name = name;
this.dsl = dsl;
}
/**
* Parse a stream definition.
*
* @return the AST for the parsed stream
* @throws ParseException
*/
public StreamNode parse() {
StreamNode ast = eatStream();
// Check the stream name, however it was specified
if (ast.getName() != null && !isValidName(ast.getName())) {
throw new ParseException(ast.getName(), 0, DSLMessage.ILLEGAL_STREAM_NAME, ast.getName());
}
if (name != null && !isValidName(name)) {
throw new ParseException(name, 0, DSLMessage.ILLEGAL_STREAM_NAME, name);
}
// Check that each module has a unique label (either explicit or implicit)
Map<String, ModuleNode> alreadySeen = new LinkedHashMap<String, ModuleNode>();
for (int m = 0; m < ast.getModuleNodes().size(); m++) {
ModuleNode node = ast.getModuleNodes().get(m);
ModuleNode previous = alreadySeen.put(node.getLabelName(), node);
if (previous != null) {
String duplicate = node.getLabelName();
int previousIndex = new ArrayList<String>(alreadySeen.keySet()).indexOf(duplicate);
throw new ParseException(dsl, node.startPos, DSLMessage.DUPLICATE_LABEL,
duplicate, previous.getName(), previousIndex, node.getName(), m);
}
}
// Check if the stream name is same as that of any of its modules' names
// Can lead to infinite recursion during resolution, when parsing a composite module.
if (ast.getModule(name) != null) {
throw new ParseException(dsl, dsl.indexOf(name),
DSLMessage.STREAM_NAME_MATCHING_MODULE_NAME,
name);
}
Tokens tokens = getTokens();
if (tokens.hasNext()) {
tokens.raiseException(tokens.peek().startPos, DSLMessage.MORE_INPUT,
toString(tokens.next()));
}
return ast;
}
/**
* If a stream name is present, return it and advance the token position -
* otherwise return {@code null}.
* <p>
* Expected format:
* {@code name =}
*
* @return stream name if present
*/
private String eatStreamName() {
Tokens tokens = getTokens();
String streamName = null;
if (tokens.lookAhead(1, TokenKind.EQUALS)) {
if (tokens.peek(TokenKind.IDENTIFIER)) {
streamName = tokens.eat(TokenKind.IDENTIFIER).data;
tokens.next(); // skip '='
}
else {
tokens.raiseException(tokens.peek().startPos, DSLMessage.ILLEGAL_STREAM_NAME,
toString(tokens.peek()));
}
}
return streamName;
}
/**
* Return a {@link StreamNode} based on the tokens resulting from the parsed DSL.
* <p>
* Expected format:
* {@code stream: (streamName) (sourceDestination) moduleList (sinkDestination)}
*
* @return {@code StreamNode} based on parsed DSL
*/
private StreamNode eatStream() {
String streamName = eatStreamName();
SourceDestinationNode sourceDestinationNode = eatSourceDestination();
// This construct: :foo > :bar is a source then a sink destination
// with no module. Special handling for that is right here:
boolean bridge = false;
if (sourceDestinationNode != null) { // so if we are just after a '>'
if (looksLikeDestination() && noMorePipes()) {
bridge = true;
}
}
Tokens tokens = getTokens();
List<ModuleNode> moduleNodes = new ArrayList<>();
if (bridge) {
// Create a bridge module to hang the source/sink destinations off
tokens.decrementPosition(); // Rewind so we can nicely eat the sink destination
moduleNodes.add(new ModuleNode(null, "bridge", tokens.peek().startPos,
tokens.peek().endPos, null));
}
else {
moduleNodes.addAll(eatModuleList());
}
SinkDestinationNode sinkDestinationNode = eatSinkDestination();
// Further data is an error
if (tokens.hasNext()) {
Token t = tokens.peek();
DSLMessage errorMessage = DSLMessage.UNEXPECTED_DATA_AFTER_STREAMDEF;
if (!moduleNodes.isEmpty() && sinkDestinationNode == null &&
tokens.getTokenStream().get(tokens.position() - 1).isKind(TokenKind.GT)) {
// Additional token where a destination is expected, but has no prefix
errorMessage = DSLMessage.EXPECTED_DESTINATION_PREFIX;
}
tokens.raiseException(t.startPos, errorMessage, toString(t));
}
return new StreamNode(tokens.getExpression(), streamName, moduleNodes,
sourceDestinationNode, sinkDestinationNode);
}
/**
* Return {@code true} if no more pipes are present from the current token position.
*
* @return {@code true} if no more pipes are present from the current token position
*/
private boolean noMorePipes() {
return noMorePipes(getTokens().position());
}
/**
* Return {@code true} if no more pipes are present from the given position.
*
* @param position token position from which to check for the presence of pipes
* @return {@code true} if no more pipes are present from the given position
*/
private boolean noMorePipes(int position) {
List<Token> tokenList = getTokens().getTokenStream();
int tokenStreamLength = tokenList.size();
while (position < tokenStreamLength) {
if (tokenList.get(position++).getKind() == TokenKind.PIPE) {
return false;
}
}
return true;
}
/**
* Return {@code true} if the current token position appears to be pointing
* at a destination.
*
* @return {@code true} if the current token position appears to be pointing
* at a destination
*/
private boolean looksLikeDestination() {
return looksLikeDestination(getTokens().position());
}
/**
* Return {@code true} if the indicated position appears to be pointing at a destination.
*
* @param position token position to check
* @return {@code true} if the indicated position appears to be pointing at a destination.
*/
private boolean looksLikeDestination(int position) {
Tokens tokens = getTokens();
List<Token> tokenList = tokens.getTokenStream();
if (tokens.hasNext() && tokenList.get(position).getKind() == TokenKind.COLON) {
if (tokenList.get(position - 1).isKind(TokenKind.GT)) {
return true;
}
}
return false;
}
/**
* If the current token position contains a source destination, return a
* {@link SourceDestinationNode} and advance the token position; otherwise
* return {@code null}.
* <p>
* Expected format:
* {@code ':' identifier >}
* {@code ':' identifier '.' identifier >}
*
* @return a {@code SourceDestinationNode} or {@code null} if the token
* position is not pointing at a source destination
*/
private SourceDestinationNode eatSourceDestination() {
Tokens tokens = getTokens();
boolean gtBeforePipe = false;
// Seek for a GT(>) before a PIPE(|)
List<Token> tokenList = tokens.getTokenStream();
for (int i = tokens.position(); i < tokenList.size(); i++) {
Token t = tokenList.get(i);
if (t.getKind() == TokenKind.GT) {
gtBeforePipe = true;
break;
}
else if (t.getKind() == TokenKind.PIPE) {
break;
}
}
if (!gtBeforePipe) {
return null;
}
DestinationNode destinationNode = eatDestinationReference();
if (destinationNode == null) {
return null;
}
Token gt = tokens.eat(TokenKind.GT);
return new SourceDestinationNode(destinationNode, gt.endPos);
}
/**
* If the current token position contains a sink destination, return a
* {@link SinkDestinationNode} and advance the token position; otherwise
* return {@code null}.
* <p>
* Expected format:
* {@code '>' ':' identifier}
*
* @return a {@code SinkDestinationNode} or {@code null} if the token
* position is not pointing at a sink destination
*/
private SinkDestinationNode eatSinkDestination() {
Tokens tokens = getTokens();
SinkDestinationNode SinkDestinationNode = null;
if (tokens.peek(TokenKind.GT)) {
Token gt = tokens.eat(TokenKind.GT);
DestinationNode destinationNode = eatDestinationReference();
if (destinationNode == null) {
return null;
}
SinkDestinationNode = new SinkDestinationNode(destinationNode, gt.startPos);
}
return SinkDestinationNode;
}
/**
* Return a {@link DestinationNode} for the token at the current position.
* <p>
* A destination reference is the label component when referencing a specific
* module/label in a stream definition.
*
* Expected format:
* {@code ':' identifier [ '.' identifier ]*}
* <p>
*
* @return {@code DestinationNode} representing the destination reference
*/
private DestinationNode eatDestinationReference() {
Tokens tokens = getTokens();
Token firstToken = tokens.next();
if (!firstToken.isKind(TokenKind.COLON)) {
tokens.decrementPosition();
return null;
}
List<Token> destinationNameComponents = new ArrayList<Token>();
Token identifierToken = tokens.next();
destinationNameComponents.add(identifierToken);
while (tokens.peek(TokenKind.DOT)) {
if (!tokens.isNextAdjacent()) {
tokens.raiseException(tokens.peek().startPos,
DSLMessage.NO_WHITESPACE_IN_DESTINATION_DEFINITION);
}
tokens.next(); // skip dot
if (!tokens.isNextAdjacent()) {
tokens.raiseException(tokens.peek().startPos,
DSLMessage.NO_WHITESPACE_IN_DESTINATION_DEFINITION);
}
destinationNameComponents.add(tokens.eat(TokenKind.IDENTIFIER));
}
int endPos = identifierToken.endPos;
if (!destinationNameComponents.isEmpty()) {
endPos = destinationNameComponents.get(destinationNameComponents.size() - 1).endPos;
}
ArgumentNode[] argumentNodes = eatModuleArgs();
return new DestinationNode(identifierToken.startPos, endPos, tokenListToStringList(destinationNameComponents),
argumentNodes);
}
/**
 * Parse a pipe-separated sequence of modules starting at the current token.
 * <p>
 * Expected format:
 * {@code moduleList: module (| module)*}
 * <p>
 * The list ends at the first token that is not a {@code |}; a stream may
 * therefore end in a module (a sink) or be followed by a sink destination,
 * which the caller consumes separately.
 *
 * @return a list of {@code ModuleNode}, never empty
 */
private List<ModuleNode> eatModuleList() {
    Tokens tokens = getTokens();
    List<ModuleNode> moduleNodes = new ArrayList<ModuleNode>();
    moduleNodes.add(eatModule());
    // Keep consuming "| module" while a pipe follows; anything else
    // (e.g. a trailing sink destination) terminates the module list.
    while (tokens.hasNext() && tokens.peek().kind == TokenKind.PIPE) {
        tokens.next(); // consume the pipe
        moduleNodes.add(eatModule());
    }
    return moduleNodes;
}
@Override
public String toString() {
    // Debug dump: the raw token stream followed by the current parse position.
    Tokens tokens = getTokens();
    StringBuilder sb = new StringBuilder();
    sb.append(tokens.getTokenStream());
    sb.append("\n");
    sb.append("tokenStreamPointer=").append(tokens.position()).append("\n");
    return sb.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.connectors.kafka;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.streaming.connectors.kafka.config.StartupMode;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.descriptors.DescriptorProperties;
import org.apache.flink.table.descriptors.KafkaValidator;
import org.apache.flink.table.descriptors.SchemaValidator;
import org.apache.flink.table.factories.DeserializationSchemaFactory;
import org.apache.flink.table.factories.StreamTableSourceFactory;
import org.apache.flink.table.factories.TableFactoryService;
import org.apache.flink.table.sources.RowtimeAttributeDescriptor;
import org.apache.flink.table.sources.StreamTableSource;
import org.apache.flink.types.Row;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import static org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_PROPERTY_VERSION;
import static org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_TYPE;
import static org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_VERSION;
import static org.apache.flink.table.descriptors.FormatDescriptorValidator.FORMAT;
import static org.apache.flink.table.descriptors.KafkaValidator.CONNECTOR_PROPERTIES;
import static org.apache.flink.table.descriptors.KafkaValidator.CONNECTOR_PROPERTIES_KEY;
import static org.apache.flink.table.descriptors.KafkaValidator.CONNECTOR_PROPERTIES_VALUE;
import static org.apache.flink.table.descriptors.KafkaValidator.CONNECTOR_SPECIFIC_OFFSETS;
import static org.apache.flink.table.descriptors.KafkaValidator.CONNECTOR_SPECIFIC_OFFSETS_OFFSET;
import static org.apache.flink.table.descriptors.KafkaValidator.CONNECTOR_SPECIFIC_OFFSETS_PARTITION;
import static org.apache.flink.table.descriptors.KafkaValidator.CONNECTOR_STARTUP_MODE;
import static org.apache.flink.table.descriptors.KafkaValidator.CONNECTOR_TOPIC;
import static org.apache.flink.table.descriptors.KafkaValidator.CONNECTOR_TYPE_VALUE_KAFKA;
import static org.apache.flink.table.descriptors.RowtimeValidator.ROWTIME_TIMESTAMPS_CLASS;
import static org.apache.flink.table.descriptors.RowtimeValidator.ROWTIME_TIMESTAMPS_FROM;
import static org.apache.flink.table.descriptors.RowtimeValidator.ROWTIME_TIMESTAMPS_SERIALIZED;
import static org.apache.flink.table.descriptors.RowtimeValidator.ROWTIME_TIMESTAMPS_TYPE;
import static org.apache.flink.table.descriptors.RowtimeValidator.ROWTIME_WATERMARKS_CLASS;
import static org.apache.flink.table.descriptors.RowtimeValidator.ROWTIME_WATERMARKS_DELAY;
import static org.apache.flink.table.descriptors.RowtimeValidator.ROWTIME_WATERMARKS_SERIALIZED;
import static org.apache.flink.table.descriptors.RowtimeValidator.ROWTIME_WATERMARKS_TYPE;
import static org.apache.flink.table.descriptors.SchemaValidator.SCHEMA;
import static org.apache.flink.table.descriptors.SchemaValidator.SCHEMA_FROM;
import static org.apache.flink.table.descriptors.SchemaValidator.SCHEMA_NAME;
import static org.apache.flink.table.descriptors.SchemaValidator.SCHEMA_PROCTIME;
import static org.apache.flink.table.descriptors.SchemaValidator.SCHEMA_TYPE;
import static org.apache.flink.table.descriptors.StreamTableDescriptorValidator.UPDATE_MODE;
import static org.apache.flink.table.descriptors.StreamTableDescriptorValidator.UPDATE_MODE_VALUE_APPEND;
/**
 * Factory for creating configured instances of {@link KafkaTableSource}.
 *
 * <p>Translates descriptor properties into a table source: the deserialization
 * format is discovered via {@link TableFactoryService}, and schema, time
 * attributes, field mapping, and startup offsets are derived from the validated
 * properties. Version-specific subclasses supply the Kafka version string and
 * construct the concrete source.
 */
public abstract class KafkaTableSourceFactory implements StreamTableSourceFactory<Row> {

	/**
	 * Properties that must match exactly for this factory to be selected.
	 */
	@Override
	public Map<String, String> requiredContext() {
		Map<String, String> context = new HashMap<>();
		context.put(UPDATE_MODE(), UPDATE_MODE_VALUE_APPEND()); // append mode
		context.put(CONNECTOR_TYPE(), CONNECTOR_TYPE_VALUE_KAFKA); // kafka
		context.put(CONNECTOR_VERSION(), kafkaVersion()); // version
		context.put(CONNECTOR_PROPERTY_VERSION(), "1"); // backwards compatibility
		return context;
	}

	/**
	 * All property keys this factory understands. The {@code .#.} segments are
	 * placeholders for list indices; {@code format.*} is a wildcard delegated to
	 * the discovered format factory.
	 */
	@Override
	public List<String> supportedProperties() {
		List<String> properties = new ArrayList<>();

		// kafka
		properties.add(CONNECTOR_TOPIC);
		properties.add(CONNECTOR_PROPERTIES);
		properties.add(CONNECTOR_PROPERTIES + ".#." + CONNECTOR_PROPERTIES_KEY);
		properties.add(CONNECTOR_PROPERTIES + ".#." + CONNECTOR_PROPERTIES_VALUE);
		properties.add(CONNECTOR_STARTUP_MODE);
		properties.add(CONNECTOR_SPECIFIC_OFFSETS + ".#." + CONNECTOR_SPECIFIC_OFFSETS_PARTITION);
		properties.add(CONNECTOR_SPECIFIC_OFFSETS + ".#." + CONNECTOR_SPECIFIC_OFFSETS_OFFSET);

		// schema
		properties.add(SCHEMA() + ".#." + SCHEMA_TYPE());
		properties.add(SCHEMA() + ".#." + SCHEMA_NAME());
		properties.add(SCHEMA() + ".#." + SCHEMA_FROM());

		// time attributes
		properties.add(SCHEMA() + ".#." + SCHEMA_PROCTIME());
		properties.add(SCHEMA() + ".#." + ROWTIME_TIMESTAMPS_TYPE());
		properties.add(SCHEMA() + ".#." + ROWTIME_TIMESTAMPS_FROM());
		properties.add(SCHEMA() + ".#." + ROWTIME_TIMESTAMPS_CLASS());
		properties.add(SCHEMA() + ".#." + ROWTIME_TIMESTAMPS_SERIALIZED());
		properties.add(SCHEMA() + ".#." + ROWTIME_WATERMARKS_TYPE());
		properties.add(SCHEMA() + ".#." + ROWTIME_WATERMARKS_CLASS());
		properties.add(SCHEMA() + ".#." + ROWTIME_WATERMARKS_SERIALIZED());
		properties.add(SCHEMA() + ".#." + ROWTIME_WATERMARKS_DELAY());

		// format wildcard
		properties.add(FORMAT() + ".*");

		return properties;
	}

	/**
	 * Builds the table source from the given property map: validates the
	 * properties, discovers the row deserialization format, derives schema and
	 * time attributes, and resolves the consumer startup mode before delegating
	 * to {@link #createKafkaTableSource}.
	 */
	@Override
	public StreamTableSource<Row> createStreamTableSource(Map<String, String> properties) {
		final DescriptorProperties params = new DescriptorProperties(true);
		params.putProperties(properties);

		// validate
		// allow Kafka timestamps to be used, watermarks can not be received from source
		new SchemaValidator(true, supportsKafkaTimestamps(), false).validate(params);
		new KafkaValidator().validate(params);

		// deserialization schema using format discovery
		final DeserializationSchemaFactory<?> formatFactory = TableFactoryService.find(
			DeserializationSchemaFactory.class,
			properties,
			this.getClass().getClassLoader());
		@SuppressWarnings("unchecked")
		final DeserializationSchema<Row> deserializationSchema = (DeserializationSchema<Row>) formatFactory
			.createDeserializationSchema(properties);

		// schema
		final TableSchema schema = params.getTableSchema(SCHEMA());

		// proctime
		final Optional<String> proctimeAttribute = SchemaValidator.deriveProctimeAttribute(params);

		// rowtime
		final List<RowtimeAttributeDescriptor> rowtimeAttributes = SchemaValidator.deriveRowtimeAttributes(params);

		// field mapping from logical schema fields to physical (format) fields
		final Map<String, String> fieldMapping = SchemaValidator.deriveFieldMapping(params, Optional.of(schema));

		// properties: indexed key/value pairs flattened into Kafka consumer Properties
		final Properties kafkaProperties = new Properties();
		final List<Map<String, String>> propsList = params.getFixedIndexedProperties(
			CONNECTOR_PROPERTIES,
			Arrays.asList(CONNECTOR_PROPERTIES_KEY, CONNECTOR_PROPERTIES_VALUE));
		propsList.forEach(kv -> kafkaProperties.put(
			params.getString(kv.get(CONNECTOR_PROPERTIES_KEY)),
			params.getString(kv.get(CONNECTOR_PROPERTIES_VALUE))
		));

		// topic
		final String topic = params.getString(CONNECTOR_TOPIC);

		// startup mode; SPECIFIC_OFFSETS additionally fills the partition->offset map
		// as a side effect of the mapping lambda
		final Map<KafkaTopicPartition, Long> specificOffsets = new HashMap<>();
		final StartupMode startupMode = params
			.getOptionalString(CONNECTOR_STARTUP_MODE)
			.map(modeString -> {
				switch (modeString) {
					case KafkaValidator.CONNECTOR_STARTUP_MODE_VALUE_EARLIEST:
						return StartupMode.EARLIEST;

					case KafkaValidator.CONNECTOR_STARTUP_MODE_VALUE_LATEST:
						return StartupMode.LATEST;

					case KafkaValidator.CONNECTOR_STARTUP_MODE_VALUE_GROUP_OFFSETS:
						return StartupMode.GROUP_OFFSETS;

					case KafkaValidator.CONNECTOR_STARTUP_MODE_VALUE_SPECIFIC_OFFSETS:
						final List<Map<String, String>> offsetList = params.getFixedIndexedProperties(
							CONNECTOR_SPECIFIC_OFFSETS,
							Arrays.asList(CONNECTOR_SPECIFIC_OFFSETS_PARTITION, CONNECTOR_SPECIFIC_OFFSETS_OFFSET));
						offsetList.forEach(kv -> {
							final int partition = params.getInt(kv.get(CONNECTOR_SPECIFIC_OFFSETS_PARTITION));
							final long offset = params.getLong(kv.get(CONNECTOR_SPECIFIC_OFFSETS_OFFSET));
							final KafkaTopicPartition topicPartition = new KafkaTopicPartition(topic, partition);
							specificOffsets.put(topicPartition, offset);
						});
						return StartupMode.SPECIFIC_OFFSETS;

					default:
						throw new TableException("Unsupported startup mode. Validator should have checked that.");
				}
			}).orElse(StartupMode.GROUP_OFFSETS); // default when no startup mode was configured

		return createKafkaTableSource(
			schema,
			proctimeAttribute,
			rowtimeAttributes,
			fieldMapping,
			topic,
			kafkaProperties,
			deserializationSchema,
			startupMode,
			specificOffsets);
	}

	// --------------------------------------------------------------------------------------------
	// For version-specific factories
	// --------------------------------------------------------------------------------------------

	/**
	 * Returns the Kafka version.
	 */
	protected abstract String kafkaVersion();

	/**
	 * True if the Kafka source supports Kafka timestamps, false otherwise.
	 *
	 * @return True if the Kafka source supports Kafka timestamps, false otherwise.
	 */
	protected abstract boolean supportsKafkaTimestamps();

	/**
	 * Constructs the version-specific Kafka table source.
	 *
	 * @param schema                      Schema of the produced table.
	 * @param proctimeAttribute           Field name of the processing time attribute.
	 * @param rowtimeAttributeDescriptors Descriptor for a rowtime attribute
	 * @param fieldMapping                Mapping for the fields of the table schema to
	 *                                    fields of the physical returned type.
	 * @param topic                       Kafka topic to consume.
	 * @param properties                  Properties for the Kafka consumer.
	 * @param deserializationSchema       Deserialization schema for decoding records from Kafka.
	 * @param startupMode                 Startup mode for the contained consumer.
	 * @param specificStartupOffsets      Specific startup offsets; only relevant when startup
	 *                                    mode is {@link StartupMode#SPECIFIC_OFFSETS}.
	 */
	protected abstract KafkaTableSource createKafkaTableSource(
		TableSchema schema,
		Optional<String> proctimeAttribute,
		List<RowtimeAttributeDescriptor> rowtimeAttributeDescriptors,
		Map<String, String> fieldMapping,
		String topic, Properties properties,
		DeserializationSchema<Row> deserializationSchema,
		StartupMode startupMode,
		Map<KafkaTopicPartition, Long> specificStartupOffsets);
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.rollup;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.LongBounds;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.GeoDistanceAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import org.junit.Before;
import java.io.IOException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
import static java.util.Collections.emptyList;
import static org.elasticsearch.xpack.rollup.RollupRequestTranslator.translateAggregation;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
/**
 * Tests for {@link RollupRequestTranslator#translateAggregation}: verifies that
 * live-index aggregations are rewritten to target the rollup document field
 * naming scheme (e.g. {@code foo.date_histogram.timestamp},
 * {@code field.max.value}), that avg is decomposed into sum + count, and that
 * unsupported aggregation types are rejected.
 */
public class RollupRequestTranslationTests extends ESTestCase {

    private NamedWriteableRegistry namedWriteableRegistry;

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        // Register the search module's named writeables; presumably required for
        // copying aggregation builders during translation — confirm against
        // RollupRequestTranslator's use of the registry.
        SearchModule searchModule = new SearchModule(Settings.EMPTY, emptyList());
        List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
        entries.addAll(searchModule.getNamedWriteables());
        namedWriteableRegistry = new NamedWriteableRegistry(entries);
    }

    // A date_histogram with max + avg sub-aggs translates to one date_histogram
    // over the rollup timestamp field with 4 sub-aggs: max, avg's sum + count,
    // and the implicit doc-count sum.
    public void testBasicDateHisto() {
        DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo");
        histo.calendarInterval(new DateHistogramInterval("1d"))
                .field("foo")
                .extendedBounds(new LongBounds(0L, 1000L))
                .subAggregation(new MaxAggregationBuilder("the_max").field("max_field"))
                .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field"));

        List<AggregationBuilder> translated = translateAggregation(histo, namedWriteableRegistry);
        assertThat(translated.size(), equalTo(1));
        assertThat(translated.get(0), Matchers.instanceOf(DateHistogramAggregationBuilder.class));
        DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0);

        assertThat(translatedHisto.getCalendarInterval(), equalTo(new DateHistogramInterval("1d")));
        assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp"));
        assertThat(translatedHisto.getSubAggregations().size(), equalTo(4));

        Map<String, AggregationBuilder> subAggs = translatedHisto.getSubAggregations()
                .stream().collect(Collectors.toMap(AggregationBuilder::getName, Function.identity()));

        assertThat(subAggs.get("the_max"), Matchers.instanceOf(MaxAggregationBuilder.class));
        assertThat(((MaxAggregationBuilder)subAggs.get("the_max")).field(), equalTo("max_field.max.value"));

        // avg is decomposed into a sum of values plus a sum of counts
        assertThat(subAggs.get("the_avg.value"), Matchers.instanceOf(SumAggregationBuilder.class));
        SumAggregationBuilder avg = (SumAggregationBuilder)subAggs.get("the_avg.value");
        assertThat(avg.field(), equalTo("avg_field.avg.value"));

        assertThat(subAggs.get("the_avg._count"), Matchers.instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)subAggs.get("the_avg._count")).field(),
                equalTo("avg_field.avg._count"));

        assertThat(subAggs.get("test_histo._count"), Matchers.instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)subAggs.get("test_histo._count")).field(),
                equalTo("foo.date_histogram._count"));
    }

    // The configured format string must survive translation unchanged.
    public void testFormattedDateHisto() {
        DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo");
        histo.calendarInterval(new DateHistogramInterval("1d"))
                .field("foo")
                .extendedBounds(new LongBounds(0L, 1000L))
                .format("yyyy-MM-dd")
                .subAggregation(new MaxAggregationBuilder("the_max").field("max_field"));

        List<AggregationBuilder> translated = translateAggregation(histo, namedWriteableRegistry);
        assertThat(translated.size(), equalTo(1));
        assertThat(translated.get(0), Matchers.instanceOf(DateHistogramAggregationBuilder.class));
        DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0);

        assertThat(translatedHisto.getCalendarInterval(), equalTo(new DateHistogramInterval("1d")));
        assertThat(translatedHisto.format(), equalTo("yyyy-MM-dd"));
        assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp"));
    }

    // max/min/sum translate 1:1, only the field name is rewritten.
    public void testSimpleMetric() {
        int i = ESTestCase.randomIntBetween(0, 2);
        List<AggregationBuilder> translated = new ArrayList<>();

        Class<? extends AggregationBuilder> clazz = null;
        String fieldName = null;
        int numAggs = 1;

        if (i == 0) {
            translated = translateAggregation(new MaxAggregationBuilder("test_metric")
                    .field("foo"), namedWriteableRegistry);
            clazz = MaxAggregationBuilder.class;
            fieldName =  "foo.max.value";
        } else if (i == 1) {
            translated = translateAggregation(new MinAggregationBuilder("test_metric")
                    .field("foo"), namedWriteableRegistry);
            clazz = MinAggregationBuilder.class;
            fieldName =  "foo.min.value";
        } else if (i == 2) {
            translated = translateAggregation(new SumAggregationBuilder("test_metric")
                    .field("foo"), namedWriteableRegistry);
            clazz = SumAggregationBuilder.class;
            fieldName =  "foo.sum.value";
        }

        assertThat(translated.size(), equalTo(numAggs));
        assertThat(translated.get(0), Matchers.instanceOf(clazz));
        assertThat((translated.get(0)).getName(), equalTo("test_metric"));
        assertThat(((ValuesSourceAggregationBuilder)translated.get(0)).field(), equalTo(fieldName));
    }

    // stats is not a supported rollup metric and must be rejected with a clear message.
    public void testUnsupportedMetric() {
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                () -> translateAggregation(new StatsAggregationBuilder("test_metric")
                        .field("foo"), namedWriteableRegistry));
        assertThat(e.getMessage(), equalTo("Unable to translate aggregation tree into Rollup. Aggregation [test_metric] is of type " +
                "[StatsAggregationBuilder] which is currently unsupported."));
    }

    // Same as testBasicDateHisto but without extended bounds; calendar interval preserved.
    public void testDateHistoIntervalWithMinMax() {
        DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo");
        histo.calendarInterval(new DateHistogramInterval("1d"))
                .field("foo")
                .subAggregation(new MaxAggregationBuilder("the_max").field("max_field"))
                .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field"));

        List<AggregationBuilder> translated = translateAggregation(histo, namedWriteableRegistry);
        assertThat(translated.size(), equalTo(1));
        assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class));
        DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0);

        assertThat(translatedHisto.getCalendarInterval().toString(), equalTo("1d"));
        assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp"));
        assertThat(translatedHisto.getSubAggregations().size(), equalTo(4));

        Map<String, AggregationBuilder> subAggs = translatedHisto.getSubAggregations()
                .stream().collect(Collectors.toMap(AggregationBuilder::getName, Function.identity()));

        assertThat(subAggs.get("the_max"), instanceOf(MaxAggregationBuilder.class));
        assertThat(((MaxAggregationBuilder)subAggs.get("the_max")).field(), equalTo("max_field.max.value"));

        assertThat(subAggs.get("the_avg.value"), instanceOf(SumAggregationBuilder.class));
        SumAggregationBuilder avg = (SumAggregationBuilder)subAggs.get("the_avg.value");
        assertThat(avg.field(), equalTo("avg_field.avg.value"));

        assertThat(subAggs.get("the_avg._count"), instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)subAggs.get("the_avg._count")).field(),
                equalTo("avg_field.avg._count"));

        assertThat(subAggs.get("test_histo._count"), instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)subAggs.get("test_histo._count")).field(),
                equalTo("foo.date_histogram._count"));
    }

    // A deprecated millisecond interval becomes a fixed_interval in "ms" and
    // emits a deprecation warning.
    public void testDateHistoLongIntervalWithMinMax() {
        DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo");
        histo.interval(86400000)
                .field("foo")
                .subAggregation(new MaxAggregationBuilder("the_max").field("max_field"))
                .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field"));

        List<AggregationBuilder> translated = translateAggregation(histo, namedWriteableRegistry);
        assertThat(translated.size(), equalTo(1));
        assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class));
        DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0);

        assertNull(translatedHisto.getCalendarInterval());
        assertThat(translatedHisto.getFixedInterval(), equalTo(new DateHistogramInterval("86400000ms")));
        assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp"));
        assertThat(translatedHisto.getSubAggregations().size(), equalTo(4));

        Map<String, AggregationBuilder> subAggs = translatedHisto.getSubAggregations()
                .stream().collect(Collectors.toMap(AggregationBuilder::getName, Function.identity()));

        assertThat(subAggs.get("the_max"), instanceOf(MaxAggregationBuilder.class));
        assertThat(((MaxAggregationBuilder)subAggs.get("the_max")).field(), equalTo("max_field.max.value"));

        assertThat(subAggs.get("the_avg.value"), instanceOf(SumAggregationBuilder.class));
        SumAggregationBuilder avg = (SumAggregationBuilder)subAggs.get("the_avg.value");
        assertThat(avg.field(), equalTo("avg_field.avg.value"));

        assertThat(subAggs.get("the_avg._count"), instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)subAggs.get("the_avg._count")).field(),
                equalTo("avg_field.avg._count"));

        assertThat(subAggs.get("test_histo._count"), instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)subAggs.get("test_histo._count")).field(),
                equalTo("foo.date_histogram._count"));

        assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] " +
                "or [calendar_interval] in the future.");
    }

    // The time zone must be carried over to the translated histogram.
    public void testDateHistoWithTimezone() {
        ZoneId timeZone = ZoneId.of(randomFrom(ZoneId.getAvailableZoneIds()));
        DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo");
        histo.fixedInterval(new DateHistogramInterval("86400000ms"))
                .field("foo")
                .timeZone(timeZone);

        List<AggregationBuilder> translated = translateAggregation(histo, namedWriteableRegistry);
        assertThat(translated.size(), equalTo(1));
        assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class));
        DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0);

        assertThat(translatedHisto.getFixedInterval().toString(), equalTo("86400000ms"));
        assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp"));
        assertThat(translatedHisto.timeZone(), equalTo(timeZone));
    }

    // Deprecated interval(long) still works but triggers the deprecation warning.
    public void testDeprecatedInterval() {
        DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo");
        histo.interval(86400000).field("foo");

        List<AggregationBuilder> translated = translateAggregation(histo, namedWriteableRegistry);
        assertThat(translated.size(), equalTo(1));
        assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class));
        DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0);

        assertThat(translatedHisto.getFixedInterval().toString(), equalTo("86400000ms"));
        assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp"));
        assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] " +
                "or [calendar_interval] in the future.");
    }

    // Deprecated dateHistogramInterval(...) is preserved verbatim through
    // translation; checked for both a calendar-like ("1d") and multi-unit ("4d") value.
    public void testDeprecatedDateHistoInterval() {
        DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("test_histo");
        histo.dateHistogramInterval(new DateHistogramInterval("1d")).field("foo");

        List<AggregationBuilder> translated = translateAggregation(histo, namedWriteableRegistry);
        assertThat(translated.size(), equalTo(1));
        assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class));
        DateHistogramAggregationBuilder translatedHisto = (DateHistogramAggregationBuilder)translated.get(0);

        assertThat(translatedHisto.dateHistogramInterval().toString(), equalTo("1d"));
        assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp"));
        assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] " +
                "or [calendar_interval] in the future.");

        histo = new DateHistogramAggregationBuilder("test_histo");
        histo.dateHistogramInterval(new DateHistogramInterval("4d")).field("foo");

        translated = translateAggregation(histo, namedWriteableRegistry);
        assertThat(translated.size(), equalTo(1));
        assertThat(translated.get(0), instanceOf(DateHistogramAggregationBuilder.class));
        translatedHisto = (DateHistogramAggregationBuilder)translated.get(0);

        assertThat(translatedHisto.dateHistogramInterval().toString(), equalTo("4d"));
        assertThat(translatedHisto.field(), equalTo("foo.date_histogram.timestamp"));
        assertWarnings("[interval] on [date_histogram] is deprecated, use [fixed_interval] " +
                "or [calendar_interval] in the future.");
    }

    // A top-level avg becomes two sums: one over the pre-aggregated values and
    // one over the pre-aggregated counts.
    public void testAvgMetric() {
        List<AggregationBuilder> translated = translateAggregation(new AvgAggregationBuilder("test_metric")
                .field("foo"), namedWriteableRegistry);

        assertThat(translated.size(), equalTo(2));
        Map<String, AggregationBuilder> metrics = translated.stream()
                .collect(Collectors.toMap(AggregationBuilder::getName, Function.identity()));

        assertThat(metrics.get("test_metric.value"), Matchers.instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)metrics.get("test_metric.value")).field(),
                equalTo("foo.avg.value"));

        assertThat(metrics.get("test_metric._count"), Matchers.instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)metrics.get("test_metric._count")).field(),
                equalTo("foo.avg._count"));
    }

    // terms aggregations are rewritten onto the rollup terms fields, with the
    // same sub-agg decomposition as the histogram cases.
    public void testStringTerms() throws IOException {
        TermsAggregationBuilder terms = new TermsAggregationBuilder("test_string_terms").userValueTypeHint(ValueType.STRING);
        terms.field("foo")
                .subAggregation(new MaxAggregationBuilder("the_max").field("max_field"))
                .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field"));

        List<AggregationBuilder> translated = translateAggregation(terms, namedWriteableRegistry);
        assertThat(translated.size(), equalTo(1));
        assertThat(translated.get(0), Matchers.instanceOf(TermsAggregationBuilder.class));
        TermsAggregationBuilder translatedHisto = (TermsAggregationBuilder)translated.get(0);

        assertThat(translatedHisto.field(), equalTo("foo.terms.value"));
        assertThat(translatedHisto.getSubAggregations().size(), equalTo(4));

        Map<String, AggregationBuilder> subAggs = translatedHisto.getSubAggregations()
                .stream().collect(Collectors.toMap(AggregationBuilder::getName, Function.identity()));

        assertThat(subAggs.get("the_max"), Matchers.instanceOf(MaxAggregationBuilder.class));
        assertThat(((MaxAggregationBuilder)subAggs.get("the_max")).field(), equalTo("max_field.max.value"));

        assertThat(subAggs.get("the_avg.value"), Matchers.instanceOf(SumAggregationBuilder.class));
        SumAggregationBuilder avg = (SumAggregationBuilder)subAggs.get("the_avg.value");
        assertThat(avg.field(), equalTo("avg_field.avg.value"));

        assertThat(subAggs.get("the_avg._count"), Matchers.instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)subAggs.get("the_avg._count")).field(),
                equalTo("avg_field.avg._count"));

        assertThat(subAggs.get("test_string_terms._count"), Matchers.instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)subAggs.get("test_string_terms._count")).field(),
                equalTo("foo.terms._count"));
    }

    // Numeric histograms translate onto the rollup histogram value/count fields.
    public void testBasicHisto() {
        HistogramAggregationBuilder histo = new HistogramAggregationBuilder("test_histo");
        histo.field("foo")
                .interval(1L)
                .extendedBounds(0.0, 1000.0)
                .subAggregation(new MaxAggregationBuilder("the_max").field("max_field"))
                .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field"));

        List<AggregationBuilder> translated = translateAggregation(histo, namedWriteableRegistry);
        assertThat(translated.size(), equalTo(1));
        assertThat(translated.get(0), Matchers.instanceOf(HistogramAggregationBuilder.class));
        HistogramAggregationBuilder translatedHisto = (HistogramAggregationBuilder)translated.get(0);

        assertThat(translatedHisto.field(), equalTo("foo.histogram.value"));
        assertThat(translatedHisto.getSubAggregations().size(), equalTo(4));

        Map<String, AggregationBuilder> subAggs = translatedHisto.getSubAggregations()
                .stream().collect(Collectors.toMap(AggregationBuilder::getName, Function.identity()));

        assertThat(subAggs.get("the_max"), Matchers.instanceOf(MaxAggregationBuilder.class));
        assertThat(((MaxAggregationBuilder)subAggs.get("the_max")).field(), equalTo("max_field.max.value"));

        assertThat(subAggs.get("the_avg.value"), Matchers.instanceOf(SumAggregationBuilder.class));
        SumAggregationBuilder avg = (SumAggregationBuilder)subAggs.get("the_avg.value");
        assertThat(avg.field(), equalTo("avg_field.avg.value"));

        assertThat(subAggs.get("the_avg._count"), Matchers.instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)subAggs.get("the_avg._count")).field(),
                equalTo("avg_field.avg._count"));

        assertThat(subAggs.get("test_histo._count"), Matchers.instanceOf(SumAggregationBuilder.class));
        assertThat(((SumAggregationBuilder)subAggs.get("test_histo._count")).field(),
                equalTo("foo.histogram._count"));
    }

    // Geo-distance is not translatable to rollup documents and must fail fast.
    public void testUnsupportedAgg() {
        GeoDistanceAggregationBuilder geo = new GeoDistanceAggregationBuilder("test_geo", new GeoPoint(0.0, 0.0));
        geo.field("foo")
                .subAggregation(new MaxAggregationBuilder("the_max").field("max_field"))
                .subAggregation(new AvgAggregationBuilder("the_avg").field("avg_field"));

        Exception e = expectThrows(RuntimeException.class,
                () -> translateAggregation(geo, namedWriteableRegistry));
        assertThat(e.getMessage(), equalTo("Unable to translate aggregation tree into Rollup. Aggregation [test_geo] is of type " +
                "[GeoDistanceAggregationBuilder] which is currently unsupported."));
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.lang.properties.references;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.ide.fileTemplates.FileTemplate;
import com.intellij.ide.fileTemplates.FileTemplateManager;
import com.intellij.ide.fileTemplates.FileTemplateUtil;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.ide.util.TreeFileChooser;
import com.intellij.ide.util.TreeFileChooserFactory;
import com.intellij.lang.properties.IProperty;
import com.intellij.lang.properties.LastSelectedPropertiesFileStore;
import com.intellij.lang.properties.PropertiesImplUtil;
import com.intellij.lang.properties.psi.PropertiesFile;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.impl.source.resolve.FileContextUtil;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.GuiUtils;
import com.intellij.ui.TextFieldWithHistory;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.IOException;
import java.text.Normalizer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
/**
 * Dialog shown by the "i18nize hard coded string literal" quick fix: lets the user choose the
 * target properties file, the property key and the property value for the string being extracted.
 *
 * @author cdr
 */
public class I18nizeQuickFixDialog extends DialogWrapper implements I18nizeQuickFixModel {
  protected static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.i18n.I18nizeQuickFixDialog");
  // Matches combining diacritical marks; used to strip accents (after NFD normalization) when deriving a key.
  private static final Pattern PATTERN = Pattern.compile("\\p{InCombiningDiacriticalMarks}+");

  // UI components; several are bound by the GUI designer form — do not rename.
  private JTextField myValue;
  private JComboBox myKey;
  private final TextFieldWithHistory myPropertiesFile;
  protected JPanel myPanel;
  private JCheckBox myUseResourceBundle;
  protected final Project myProject;
  protected final PsiFile myContext;
  private JPanel myPropertiesFilePanel;
  protected JPanel myExtensibilityPanel;

  // The hard coded string value that the quick fix is extracting.
  protected final String myDefaultPropertyValue;
  protected final DialogCustomization myCustomization;

  /** Optional tweaks callers may apply to the dialog (title, suggestions, focus, explicit file list). */
  public static class DialogCustomization {
    private final String title;
    private final boolean suggestExistingProperties;
    private final boolean focusValueComponent;
    private final List<PropertiesFile> propertiesFiles;
    private final String suggestedName;

    public DialogCustomization(String title, boolean suggestExistingProperties, boolean focusValueComponent,
                               List<PropertiesFile> propertiesFiles,
                               String suggestedName) {
      this.title = title;
      this.suggestExistingProperties = suggestExistingProperties;
      this.focusValueComponent = focusValueComponent;
      this.propertiesFiles = propertiesFiles;
      this.suggestedName = suggestedName;
    }

    public DialogCustomization() {
      this(null, true, false, null, null);
    }

    public String getSuggestedName() {
      return suggestedName;
    }
  }

  public I18nizeQuickFixDialog(@NotNull Project project,
                               @NotNull final PsiFile context,
                               String defaultPropertyValue,
                               DialogCustomization customization
  ) {
    this(project, context, defaultPropertyValue, customization, false);
  }

  /**
   * @param ancestorResponsible when true, {@link #init()} is NOT called here; the subclass is
   *                            expected to call it after finishing its own construction.
   */
  protected I18nizeQuickFixDialog(@NotNull Project project,
                                  @NotNull final PsiFile context,
                                  String defaultPropertyValue,
                                  DialogCustomization customization,
                                  boolean ancestorResponsible) {
    super(false);
    myProject = project;
    myContext = FileContextUtil.getContextFile(context);

    myDefaultPropertyValue = defaultPropertyValue;
    myCustomization = customization != null ? customization:new DialogCustomization();
    setTitle(myCustomization.title != null ? myCustomization.title:CodeInsightBundle.message("i18nize.dialog.title"));

    myPropertiesFile = new TextFieldWithHistory();
    myPropertiesFile.setHistorySize(-1); // -1 = unlimited history
    myPropertiesFilePanel.add(GuiUtils.constructFieldWithBrowseButton(myPropertiesFile, new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        TreeFileChooserFactory chooserFactory = TreeFileChooserFactory.getInstance(myProject);
        final PropertiesFile propertiesFile = getPropertiesFile();
        TreeFileChooser fileChooser = chooserFactory.createFileChooser(
          CodeInsightBundle.message("i18nize.dialog.property.file.chooser.title"), propertiesFile != null ? propertiesFile.getContainingFile() : null, StdFileTypes.PROPERTIES, null);
        fileChooser.showDialog();
        PsiFile selectedFile = fileChooser.getSelectedFile();
        if (selectedFile == null) return;
        myPropertiesFile.setText(FileUtil.toSystemDependentName(selectedFile.getVirtualFile().getPath()));
      }
    }), BorderLayout.CENTER);

    // Re-validate the dialog whenever the properties file path, the key or the value changes.
    myPropertiesFile.addDocumentListener(new DocumentAdapter() {
      @Override
      protected void textChanged(@NotNull DocumentEvent e) {
        propertiesFileChanged();
        somethingChanged();
      }
    });

    getKeyTextField().getDocument().addDocumentListener(new DocumentAdapter() {
      @Override
      protected void textChanged(@NotNull DocumentEvent e) {
        somethingChanged();
      }
    });

    myValue.getDocument().addDocumentListener(new DocumentAdapter() {
      @Override
      protected void textChanged(@NotNull DocumentEvent e) {
        somethingChanged();
      }
    });

    // Remember the "use resource bundle" choice across IDE sessions; defaults to selected.
    @NonNls final String KEY = "I18NIZE_DIALOG_USE_RESOURCE_BUNDLE";
    final boolean useBundleByDefault =
      !PropertiesComponent.getInstance().isValueSet(KEY) || PropertiesComponent.getInstance().isTrueValue(KEY);
    myUseResourceBundle.setSelected(useBundleByDefault);
    myUseResourceBundle.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        PropertiesComponent.getInstance().setValue(KEY, Boolean.valueOf(myUseResourceBundle.isSelected()).toString());
      }
    });

    if (!ancestorResponsible) init();
  }

  @Override
  protected void init() {
    populatePropertiesFiles();
    propertiesFileChanged();
    somethingChanged();
    setKeyValueEditBoxes();

    super.init();
  }

  // The key combo box is editable; this returns its editor component as a text field.
  private JTextField getKeyTextField() {
    return (JTextField)myKey.getEditor().getEditorComponent();
  }

  /**
   * Returns the keys of properties in the selected file whose value equals {@code value},
   * so an already existing key can be reused instead of creating a new one.
   */
  @NotNull
  protected List<String> getExistingValueKeys(String value) {
    if(!myCustomization.suggestExistingProperties) {
      return Collections.emptyList();
    }
    final ArrayList<String> result = new ArrayList<>();

    // check if property value already exists among properties file values and suggest corresponding key
    PropertiesFile propertiesFile = getPropertiesFile();
    if (propertiesFile != null) {
      for (IProperty property : propertiesFile.getProperties()) {
        if (Comparing.strEqual(property.getValue(), value)) {
          result.add(0, property.getUnescapedKey());
        }
      }
    }
    return result;
  }

  /**
   * Suggests a property key for the given value: the customization's suggested name if set,
   * otherwise a subclass-provided default, otherwise a dot-separated lower-case key derived
   * from the value itself. A numeric suffix is appended until the key is unique in the
   * currently selected properties file.
   */
  protected String suggestPropertyKey(String value) {
    if (myCustomization.suggestedName != null) {
      return myCustomization.suggestedName;
    }

    // suggest property key not existing in this file
    String key = defaultSuggestPropertyKey(value);
    // strip diacritics so the generated key consists of plain letters and digits
    value = PATTERN.matcher(Normalizer.normalize(value, Normalizer.Form.NFD)).replaceAll("");
    if (key == null) {
      final StringBuilder result = new StringBuilder();
      boolean insertDotBeforeNextWord = false;
      for (int i = 0; i < value.length(); i++) {
        final char c = value.charAt(i);
        if (Character.isLetterOrDigit(c)) {
          if (insertDotBeforeNextWord) {
            result.append('.');
          }
          result.append(Character.toLowerCase(c));
          insertDotBeforeNextWord = false;
        }
        else if (c == '&') { //do not insert dot if there is letter after the amp
          if (insertDotBeforeNextWord) continue;
          if (i == value.length() - 1) {
            continue;
          }
          if (Character.isLetter(value.charAt(i + 1))) {
            continue;
          }
          insertDotBeforeNextWord = true;
        }
        else {
          if (result.length() > 0) {
            insertDotBeforeNextWord = true;
          }
        }
      }
      key = result.toString();
    }

    PropertiesFile propertiesFile = getPropertiesFile();
    if (propertiesFile != null) {
      if (propertiesFile.findPropertyByKey(key) == null) return key;

      // key already taken: append the smallest numeric suffix that makes it unique
      int suffix = 1;
      while (propertiesFile.findPropertyByKey(key + suffix) != null) {
        suffix++;
      }
      return key + suffix;
    }
    else {
      return key;
    }
  }

  /** Hook for subclasses to provide their own key suggestion; null means "derive from the value". */
  protected String defaultSuggestPropertyKey(String value) {
    return null;
  }

  // Enables the "use resource bundle" checkbox only when the selected file belongs to a
  // resource bundle with more than one properties file.
  private void propertiesFileChanged() {
    PropertiesFile propertiesFile = getPropertiesFile();
    boolean hasResourceBundle =
      propertiesFile != null && propertiesFile.getResourceBundle().getPropertiesFiles().size() > 1;
    myUseResourceBundle.setEnabled(hasResourceBundle);
  }

  // Pre-fills the key combo box (existing keys with the same value, or a suggested key)
  // and the value text field.
  private void setKeyValueEditBoxes() {
    final List<String> existingValueKeys = getExistingValueKeys(myDefaultPropertyValue);

    if (existingValueKeys.isEmpty()) {
      getKeyTextField().setText(suggestPropertyKey(myDefaultPropertyValue));
    }
    else {
      for (String key : existingValueKeys) {
        myKey.addItem(key);
      }
      myKey.setSelectedItem(existingValueKeys.get(0));
    }

    myValue.setText(myDefaultPropertyValue);
  }

  // Called on every edit; OK stays enabled only while the key is non-blank.
  protected void somethingChanged() {
    setOKActionEnabled(!StringUtil.isEmptyOrSpaces(getKey()));
  }

  // Fills the properties-file history drop-down; the last selected file sorts first,
  // then files by descending use count, then alphabetically.
  private void populatePropertiesFiles() {
    List<String> paths = suggestPropertiesFiles();
    final String lastUrl = suggestSelectedFileUrl(paths);
    final String lastPath = lastUrl == null ? null : FileUtil.toSystemDependentName(VfsUtil.urlToPath(lastUrl));
    Collections.sort(paths, (path1, path2) -> {
      if (lastPath != null && lastPath.equals(path1)) return -1;
      if (lastPath != null && lastPath.equals(path2)) return 1;
      int r = LastSelectedPropertiesFileStore.getUseCount(path2) - LastSelectedPropertiesFileStore.getUseCount(path1);
      return r == 0 ? path1.compareTo(path2) : r;
    });
    myPropertiesFile.setHistory(paths);
    if (lastPath != null) {
      myPropertiesFile.setSelectedItem(lastPath);
    }
    if (myPropertiesFile.getSelectedIndex() == -1 && !paths.isEmpty()) {
      myPropertiesFile.setText(paths.get(0));
    }
  }

  // Prefers a file that already contains the default property value; otherwise falls back
  // to the last file the user selected.
  private String suggestSelectedFileUrl(List<String> paths) {
    if (myDefaultPropertyValue != null) {
      for (String path : paths) {
        VirtualFile file = LocalFileSystem.getInstance().findFileByPath(FileUtil.toSystemIndependentName(path));
        if (file == null) continue;
        PsiFile psiFile = myContext.getManager().findFile(file);
        if (!(psiFile instanceof PropertiesFile)) continue;
        for (IProperty property : ((PropertiesFile)psiFile).getProperties()) {
          if (property.getValue().equals(myDefaultPropertyValue)) return path;
        }
      }
    }
    return LastSelectedPropertiesFileStore.getInstance().suggestLastSelectedPropertiesFileUrl(myContext);
  }

  // Persists the current file choice so it can be suggested next time; called from dispose().
  private void saveLastSelectedFile() {
    PropertiesFile propertiesFile = getPropertiesFile();
    if (propertiesFile != null) {
      LastSelectedPropertiesFileStore.getInstance().saveLastSelectedPropertiesFile(myContext, propertiesFile);
    }
  }

  // Uses the customization's explicit file list when given, otherwise the project-wide defaults.
  protected List<String> suggestPropertiesFiles() {
    if (myCustomization.propertiesFiles != null && !myCustomization.propertiesFiles.isEmpty()) {
      ArrayList<String> list = new ArrayList<>();
      for (PropertiesFile propertiesFile : myCustomization.propertiesFiles) {
        final VirtualFile virtualFile = propertiesFile.getVirtualFile();
        if (virtualFile != null) {
          list.add(virtualFile.getPath());
        }
      }
      return list;
    }
    return defaultSuggestPropertiesFiles();
  }

  protected List<String> defaultSuggestPropertiesFiles() {
    return I18nUtil.defaultSuggestPropertiesFiles(myProject);
  }

  /** @return the properties file currently entered in the path field, or null if it does not exist on disk. */
  protected PropertiesFile getPropertiesFile() {
    String path = FileUtil.toSystemIndependentName(myPropertiesFile.getText());
    VirtualFile virtualFile = LocalFileSystem.getInstance().findFileByPath(path);
    return virtualFile != null
           ? PropertiesImplUtil.getPropertiesFile(PsiManager.getInstance(myProject).findFile(virtualFile))
           : null;
  }

  /**
   * Creates the properties file entered in the path field if it does not exist yet.
   * Shows an error dialog and returns false when the path is blank, the file name maps to a
   * file type other than .properties/.xml, or creation fails.
   */
  private boolean createPropertiesFileIfNotExists() {
    if (getPropertiesFile() != null) return true;
    final String path = FileUtil.toSystemIndependentName(myPropertiesFile.getText());
    if (StringUtil.isEmptyOrSpaces(path)) {
      String message = CodeInsightBundle.message("i18nize.empty.file.path", myPropertiesFile.getText());
      Messages.showErrorDialog(myProject, message, CodeInsightBundle.message("i18nize.error.creating.properties.file"));
      myPropertiesFile.requestFocusInWindow();
      return false;
    }
    final FileType fileType = FileTypeManager.getInstance().getFileTypeByFileName(path);
    if (fileType != StdFileTypes.PROPERTIES && fileType != StdFileTypes.XML) {
      String message = CodeInsightBundle.message("i18nize.cant.create.properties.file.because.its.name.is.associated",
                                                 myPropertiesFile.getText(), fileType.getDescription());
      Messages.showErrorDialog(myProject, message, CodeInsightBundle.message("i18nize.error.creating.properties.file"));
      myPropertiesFile.requestFocusInWindow();
      return false;
    }

    try {
      final File file = new File(path).getCanonicalFile();
      FileUtil.createParentDirs(file);
      ApplicationManager.getApplication().runWriteAction(new ThrowableComputable<PsiFile, Exception>() {
        @Override
        public PsiFile compute() throws Exception {
          VirtualFile dir = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file.getParentFile());
          final PsiManager psiManager = PsiManager.getInstance(myProject);
          if (dir == null) {
            throw new IOException("Error creating directory structure for file '" + path + "'");
          }
          if (fileType == StdFileTypes.PROPERTIES) {
            return psiManager.findFile(dir.createChildData(this, file.getName()));
          }
          else {
            // XML properties files are created from the bundled internal file template
            FileTemplate template = FileTemplateManager.getInstance(myProject).getInternalTemplate("XML Properties File.xml");
            LOG.assertTrue(template != null);
            return (PsiFile)FileTemplateUtil.createFromTemplate(template, file.getName(), null, psiManager.findDirectory(dir));
          }
        }
      });
    }
    catch (Exception e) {
      Messages.showErrorDialog(myProject, e.getLocalizedMessage(), CodeInsightBundle.message("i18nize.error.creating.properties.file"));
      return false;
    }
    return true;
  }

  @Override
  protected JComponent createCenterPanel() {
    return myPanel;
  }

  @Override
  public JComponent getPreferredFocusedComponent() {
    return myCustomization.focusValueComponent ? myValue:myKey;
  }

  @Override
  public void dispose() {
    saveLastSelectedFile();
    super.dispose();
  }

  // Creates the file if needed, then warns (OK/Cancel) for every target file in which the
  // chosen key already exists with a different value.
  @Override
  protected void doOKAction() {
    if (!createPropertiesFileIfNotExists()) return;
    Collection<PropertiesFile> propertiesFiles = getAllPropertiesFiles();
    for (PropertiesFile propertiesFile : propertiesFiles) {
      IProperty existingProperty = propertiesFile.findPropertyByKey(getKey());
      final String propValue = myValue.getText();
      if (existingProperty != null && !Comparing.strEqual(existingProperty.getValue(), propValue)) {
        final String messageText = CodeInsightBundle.message("i18nize.dialog.error.property.already.defined.message", getKey(), propertiesFile.getName());
        final int code = Messages.showOkCancelDialog(myProject,
                                                     messageText,
                                                     CodeInsightBundle.message("i18nize.dialog.error.property.already.defined.title"),
                                                     null);
        if (code == Messages.CANCEL) {
          return;
        }
      }
    }
    super.doOKAction();
  }

  @Override
  protected String getHelpId() {
    return "editing.propertyFile.i18nInspection";
  }

  public JComponent getValueComponent() {
    return myValue;
  }

  @Override
  public String getValue() {
    return myValue.getText();
  }

  @Override
  public String getKey() {
    return getKeyTextField().getText();
  }

  /**
   * Shows the dialog modally and reports whether the user accepted it.
   * Must not be called in unit-test mode.
   */
  @Override
  public boolean hasValidData() {
    assert !ApplicationManager.getApplication().isUnitTestMode();
    show();

    return getExitCode() == OK_EXIT_CODE;
  }

  private boolean isUseResourceBundle() {
    return myUseResourceBundle.isEnabled() && myUseResourceBundle.isSelected();
  }

  @Override
  protected String getDimensionServiceKey() {
    return "#com.intellij.codeInsight.i18n.I18nizeQuickFixDialog";
  }

  /**
   * @return all files the new property should be written to: every file of the resource bundle
   * when "use resource bundle" is active, otherwise just the selected file.
   */
  @Override
  public Collection<PropertiesFile> getAllPropertiesFiles() {
    PropertiesFile propertiesFile = getPropertiesFile();
    if (propertiesFile == null) return Collections.emptySet();
    Collection<PropertiesFile> propertiesFiles;
    if (isUseResourceBundle()) {
      propertiesFiles = propertiesFile.getResourceBundle().getPropertiesFiles();
    }
    else {
      propertiesFiles = Collections.singleton(propertiesFile);
    }
    return propertiesFiles;
  }
}
| |
package de.ulfbiallas.lantexter.model;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Observable;
import java.util.Timer;
import java.util.TimerTask;
import de.ulfbiallas.lantexter.model.message.ChatLeftMessage;
import de.ulfbiallas.lantexter.model.message.ChatMessage;
import de.ulfbiallas.lantexter.model.message.NameChangeMessage;
import de.ulfbiallas.lantexter.model.message.NewParticipantMessage;
import de.ulfbiallas.lantexter.model.network.IPacketProcessor;
import de.ulfbiallas.lantexter.model.network.JsonTools;
import de.ulfbiallas.lantexter.model.network.ServerThread;
import de.ulfbiallas.lantexter.model.network.UDPTools;
import net.sf.json.JSONObject;
/**
 * Class which processes the incoming chat messages and
 * automatically sends messages to the other chat participants.
 *
 * @author Ulf Biallas
 *
 */
public class ChatCore extends Observable implements IPacketProcessor {

  // Message type codes of the JSON chat protocol (see JsonTools message factories).
  private static final int CODE_WHO_IS_ONLINE = 1;
  private static final int CODE_I_AM_ONLINE = 2;
  private static final int CODE_CHAT_TEXT = 3;
  private static final int CODE_NAME_CHANGE = 4;
  private static final int CODE_CHAT_LEFT = 5;

  private UDPTools updtools;
  private Settings settings;
  private ParticipantList participantList;
  private ChatHistory chatHistory;
  private ServerThread serverThread;

  /** Timer for an alive signal */
  private Timer broadcastTimer;

  /** Error flag which is set if a connection could not be established */
  private boolean errorsWhileStarting = false;

  /**
   * Constructor. Wires the core to the application-wide singletons.
   */
  public ChatCore() {
    updtools = UDPTools.getInstance();
    settings = Settings.getInstance();
    participantList = ParticipantList.getInstance();
    chatHistory = ChatHistory.getInstance();
  }

  /**
   * Starts the UDP server and the alive signal timer.
   */
  public void startServer() {
    final int port = Integer.parseInt(settings.getProperty("port"));
    updtools.init(this, port);
    errorsWhileStarting = false;
    serverThread = new ServerThread(this, port);

    // Periodically send an alive signal as broadcast to find out who is online.
    broadcastTimer = new Timer();
    broadcastTimer.schedule(new TimerTask() {
      @Override
      public void run() {
        String msg = JsonTools.createWhoIsOnlineMessage(settings.getProperty("name"));
        updtools.sendStringToIp("255.255.255.255", msg);
      }
    }, 0, Constants.ALIVE_TIMER_INTERVAL * 1000);
  }

  /**
   * Stops the UDP server and the alive signal timer.
   */
  public void stopServer() {
    broadcastTimer.cancel();
    serverThread.shutdown();
  }

  /**
   * Changes the nick name and informs all online participants about it.
   *
   * @param name The new nick name.
   */
  public void changeMyName(String name) {
    sendToOnlineParticipants(JsonTools.createNewNameMessage(name));
  }

  /**
   * Adds a received message to the chat history.
   *
   * @param author The author of the received message.
   * @param msg The text of the received message.
   */
  private void addMessageToChatText(String author, String msg) {
    chatHistory.addMessage(new ChatMessage(author, msg));
  }

  /**
   * @inheritDoc
   */
  @Override
  public void onError(ModelNotification notification) {
    setChanged();
    notifyObservers(notification);
    errorsWhileStarting = true;
    stopServer();
  }

  /**
   * @inheritDoc
   */
  @Override
  public void onSuccessfulStarted() {
    System.out.println("onSuccessfulStarted!");
    if(!errorsWhileStarting) {
      setChanged();
      notifyObservers(ModelNotification.CONNECTION_ESTABLISHED);
    }
  }

  /**
   * Informs all participants which are online about leaving the chat.
   */
  public void leaveChat() {
    sendToOnlineParticipants(JsonTools.createChatLeftMessage(settings.getProperty("name")));
  }

  /**
   * Send a chat text message to all participants which are online.
   *
   * @param cmsg The message text.
   */
  public void sendMessage(String cmsg) {
    sendToOnlineParticipants(JsonTools.createChatMessage(settings.getProperty("name"), cmsg));
  }

  /**
   * Sends the given serialized message to every participant that is currently online.
   * The message is built once by the caller instead of once per receiver (it is loop invariant).
   *
   * @param msg The serialized JSON message to send.
   */
  private void sendToOnlineParticipants(String msg) {
    ArrayList<Participant> participants = participantList.getParticipants();
    for(int k=0; k<participants.size(); ++k) {
      Participant receiver = participants.get(k);
      if(receiver.isOnline()) {
        updtools.sendMsg(receiver.getInetAddress(), msg.getBytes());
      }
    }
  }

  /**
   * Registers the sender of a message as an online participant, creating the entry if necessary.
   *
   * @param addr_ The address of the sender (its string form is used as the participant id).
   * @param name The nick name of the sender.
   * @return true if the sender was unknown or offline before, i.e. it newly joined the chat.
   */
  private boolean markParticipantOnline(InetAddress addr_, String name) {
    String id = addr_.toString();
    if(!participantList.containsParticipant(id)) {
      participantList.addParticipant(participantList.createNewParticipant(id, name, addr_));
      return true;
    }
    boolean wasOffline = !participantList.isOnline(id);
    participantList.setParticipantOnline(id);
    return wasOffline;
  }

  /**
   * @inheritDoc
   */
  @Override
  public void processMessage(JSONObject msg_, InetAddress addr_, int port_) {
    int code = msg_.getInt("code");
    String msg;
    String name = "";
    boolean newParticipant = false;
    switch(code) {
      case CODE_WHO_IS_ONLINE:
        name = msg_.getString("name");
        System.out.println("received WhoIsOnlineMessage from " + addr_ + " : " + port_);
        // Answer directly so the sender learns that we are online, too.
        msg = JsonTools.createIAmOnlineMessage(settings.getProperty("name"));
        updtools.sendMsg(addr_, msg.getBytes());
        newParticipant = markParticipantOnline(addr_, name);
        break;
      case CODE_I_AM_ONLINE:
        name = msg_.getString("name");
        System.out.println("received IAmOnlineMessage from " + name + ": " + addr_ + " : " + port_);
        newParticipant = markParticipantOnline(addr_, name);
        break;
      case CODE_CHAT_TEXT:
        name = msg_.getString("name");
        msg = msg_.getString("text");
        // NOTE(review): the sender's address (not its nick name) is recorded as the author —
        // preserved from the original code; confirm this is intended.
        addMessageToChatText(addr_.toString(), msg);
        break;
      case CODE_NAME_CHANGE:
        name = msg_.getString("name");
        String oldName = participantList.getNameOfParticipant(addr_.toString());
        participantList.changeName(addr_.toString(), name);
        chatHistory.addMessage(new NameChangeMessage(oldName, name));
        break;
      case CODE_CHAT_LEFT:
        name = msg_.getString("name");
        participantList.setParticipantOffline(addr_.toString());
        chatHistory.addMessage(new ChatLeftMessage(name));
        break;
    }
    if(newParticipant) {
      System.out.println("newParticipant");
      chatHistory.addMessage(new NewParticipantMessage(name));
    }
  }
}
| |
// Copyright 2021 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.gestalt.di;
import com.google.common.base.Preconditions;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.context.AbstractBeanDefinition;
import org.terasology.context.BeanDefinition;
import org.terasology.context.EmptyAnnotationMetadata;
import org.terasology.context.Lifetime;
import org.terasology.context.exception.BeanNotFoundException;
import org.terasology.context.injection.Qualifier;
import org.terasology.gestalt.di.exceptions.BeanResolutionException;
import org.terasology.gestalt.di.instance.BeanProvider;
import org.terasology.gestalt.di.instance.ClassProvider;
import org.terasology.gestalt.di.instance.SupplierProvider;
import java.lang.reflect.Modifier;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public class DefaultBeanContext implements AutoCloseable, BeanContext {
private static final Logger logger = LoggerFactory.getLogger(DefaultBeanContext.class);
protected final Map<BeanKey, Object> boundObjects = new HashMap<>();
protected final Map<BeanKey, BeanProvider<?>> providers = new HashMap<>();
protected final Multimap<Qualifier, BeanIntercept> beanInterceptMapping = HashMultimap.create();
private final Multimap<Qualifier, BeanKey> qualifierMapping = HashMultimap.create();
private final Multimap<Class, BeanKey> interfaceMapping = HashMultimap.create();
private final Multimap<Class, BeanKey> abstractMapping = HashMultimap.create();
private final BeanContext parent;
private final BeanEnvironment environment;
public DefaultBeanContext(BeanContext root, ServiceRegistry... registries) {
this(root, new BeanEnvironment(), registries);
}
public DefaultBeanContext(ServiceRegistry... registries) {
this(null, new BeanEnvironment(), registries);
}
public DefaultBeanContext(BeanContext parent, BeanEnvironment environment, ServiceRegistry... registries) {
Preconditions.checkArgument(parent != this, "bean context can't reference itself");
this.parent = parent;
this.environment = environment;
for (ServiceRegistry registry : registries) {
this.bindRegistry(registry);
}
}
static <T> Optional<T> bindBean(DefaultBeanContext context, BeanKey identifier, Supplier<Optional<T>> supplier) {
if (context.boundObjects.containsKey(identifier)) {
return Optional.of((T) context.boundObjects.get(identifier));
}
Optional<T> result = supplier.get();
result.ifPresent(t -> context.boundObjects.put(identifier, t));
return result;
}
private void bindRegistry(ServiceRegistry registry) {
for (ClassLoader loader : registry.classLoaders) {
this.environment.loadDefinitions(loader);
}
for (BeanScanner scanner : registry.scanners) {
scanner.apply(registry, environment);
}
for (ServiceRegistry.InstanceExpression<?> expression : registry.instanceExpressions) {
bindExpression(expression);
}
this.beanInterceptMapping.putAll(registry.intercepts);
// register self as a singleton instance that is scoped to current context
bindExpression(new ServiceRegistry.InstanceExpression<>(BeanContext.class).lifetime(Lifetime.Singleton).use(() -> this));
}
private <T> void loadAbstract(BeanKey<?> key, Class<T> clazz) {
Class parentClass = clazz;
do {
if((parentClass.getModifiers() & Modifier.ABSTRACT) > 0) {
abstractMapping.put(parentClass, key);
}
parentClass = parentClass.getSuperclass();
} while (parentClass != null);
}
private <T> void bindExpression(ServiceRegistry.InstanceExpression<T> expression) {
BeanKey<?> key = new BeanKey(expression.target)
.use(expression.root)
.qualifiedBy(expression.qualifier);
if (expression.target == expression.root) {
for (Class impl : expression.target.getInterfaces()) {
interfaceMapping.put(impl, key);
}
loadAbstract(key, expression.target);
} else {
interfaceMapping.put(expression.root, key);
}
if (expression.qualifier != null) {
qualifierMapping.put(expression.qualifier, key);
}
if (expression.supplier == null) {
providers.put(key, new ClassProvider(environment, expression.lifetime, expression.target));
} else {
providers.put(key, new SupplierProvider(environment, expression.lifetime, expression.target, expression.supplier));
}
}
@Override
public <T> T inject(T instance) {
BeanDefinition<T> definition = (BeanDefinition<T>) environment.getDefinition(instance.getClass());
if (definition instanceof AbstractBeanDefinition) {
return definition.inject(instance, new DefaultBeanResolution(this, environment)).get();
}
throw new BeanNotFoundException("unable to resolve BeanDefintion: " + instance.getClass());
}
@Override
public <T> T getBean(BeanKey<T> identifier) {
Optional<T> result = findBean(identifier);
return result.orElseThrow(() -> new BeanResolutionException(identifier));
}
@Override
public <T> Optional<T> findBean(BeanKey<T> identifier) {
Optional<BeanContext> cntx = Optional.of(this);
while (cntx.isPresent()) {
BeanContext beanContext = cntx.get();
if (beanContext instanceof DefaultBeanContext) {
DefaultBeanContext defContext = ((DefaultBeanContext) beanContext);
Optional<T> target = defContext.internalResolve(identifier, this);
if (target.isPresent()) {
return target;
}
}
cntx = cntx.get().getParent();
}
return Optional.empty();
}
private Optional<BeanKey> findConcreteBeanKey(BeanKey identifier) {
List<BeanKey> beanKeys = getBeanKeys(identifier).collect(Collectors.toList());
if (beanKeys.size() > 1) {
throw new BeanResolutionException(beanKeys);
}
return beanKeys.stream().findFirst();
}
private Stream<BeanKey> getBeanKeys(BeanKey identifier) {
Collection<BeanKey> result = null;
if (providers.containsKey(identifier)) {
return Stream.of(identifier);
}
if (identifier.qualifier != null) {
result = Sets.newHashSet(qualifierMapping.get(identifier.qualifier));
}
if (identifier.baseType.isInterface()) {
if (result != null) {
Collection<BeanKey> implementing = interfaceMapping.get(identifier.baseType);
if (implementing != null) {
result.retainAll(implementing);
}
} else {
result = Sets.newHashSet(interfaceMapping.get(identifier.baseType));
}
} else if (identifier.baseType == identifier.implementingType) {
Collection<BeanKey> implementing = interfaceMapping.get(identifier.baseType);
Collection<BeanKey> abstractImpl = abstractMapping.get(identifier.baseType);
for (Class implType : identifier.baseType.getInterfaces()) {
Collection<BeanKey> temp = interfaceMapping.get(implType);
if (temp == null || temp.size() == 0) {
continue;
}
implementing.addAll(temp.stream().filter(k -> k.baseType == identifier.baseType).collect(Collectors.toSet()));
}
if (result != null && implementing != null) {
result.retainAll(implementing);
} else if (implementing != null) {
result = implementing;
}
if (result != null && abstractImpl != null) {
result.addAll(abstractImpl);
} else if(abstractImpl != null) {
result = abstractImpl;
}
} else {
Collection<BeanKey> implementing = interfaceMapping.get(identifier.implementingType);
if (result != null && implementing != null) {
result.retainAll(implementing);
} else if (implementing != null) {
result = implementing.stream().filter(k -> k.baseType == identifier.baseType).collect(Collectors.toSet());
}
}
if (result == null) {
return Stream.of();
}
return result.stream();
}
/**
* @param identifier
* @param targetContext the context that the object is being resolve to
* @param <T>
* @return
*/
private <T> Optional<T> internalResolve(BeanKey identifier, DefaultBeanContext targetContext) {
Optional<BeanKey> key = findConcreteBeanKey(identifier);
if (identifier.annotation != EmptyAnnotationMetadata.EMPTY_ARGUMENT) {
Collection<BeanIntercept> intercept = targetContext.beanInterceptMapping.get(identifier.qualifier);
if (intercept != null) {
for (BeanIntercept inter : intercept) {
Optional<T> result = inter.single(identifier, identifier.annotation);
if (result.isPresent()) {
return result;
}
}
}
}
if (key.isPresent()) {
BeanProvider<T> provider = (BeanProvider<T>) providers.get(key.get());
switch (provider.getLifetime()) {
case Transient:
return provider.get(key.get(), this, targetContext);
case Singleton:
return DefaultBeanContext.bindBean(this, key.get(), () -> provider.get(key.get(), this, targetContext));
case Scoped:
case ScopedToChildren:
if (provider.getLifetime() == Lifetime.ScopedToChildren && targetContext == this) {
return Optional.empty();
}
return DefaultBeanContext.bindBean(targetContext, key.get(), () -> provider.get(key.get(), this, targetContext));
}
}
return Optional.empty();
}
@Override
public <T> T getBean(Class<T> clazz) {
BeanKey<T> identifier = new BeanKey<>(clazz);
return getBean(identifier);
}
@Override
public <T> Optional<T> findBean(Class<T> clazz) {
BeanKey<T> identifier = new BeanKey<>(clazz);
return findBean(identifier);
}
@Override
public <T> T getBean(Class<T> clazz, Qualifier qualifier) {
BeanKey<T> identifier = new BeanKey<>(clazz)
.qualifiedBy(qualifier);
return getBean(identifier);
}
@Override
public <T> List<T> getBeans(BeanKey<T> identifier) {
Optional<BeanContext> cntx = Optional.of(this);
Stream<T> all = Stream.of();
while (cntx.isPresent()) {
BeanContext beanContext = cntx.get();
if (beanContext instanceof DefaultBeanContext) {
DefaultBeanContext defContext = ((DefaultBeanContext) beanContext);
Stream<T> target = defContext.internalMultipleResolve(identifier, this);
all = Stream.concat(all, target);
}
cntx = cntx.get().getParent();
}
return all.collect(Collectors.toList());
}
@Override
public <T> List<T> getBeans(Class<T> clazz) {
BeanKey<T> identifier = new BeanKey<>(clazz);
return getBeans(identifier);
}
@Override
public <T> List<T> getBeans(Class<T> clazz, Qualifier qualifier) {
BeanKey<T> identifier = new BeanKey<>(clazz)
.qualifiedBy(qualifier);
return getBeans(identifier);
}
private <T> Stream<T> internalMultipleResolve(BeanKey identifier, DefaultBeanContext targetContext) {
return getBeanKeys(identifier)
.map(key -> {
Collection<BeanIntercept> intercepts = targetContext.beanInterceptMapping.get(identifier.qualifier);
for (BeanIntercept intercept : intercepts) {
Optional<T> result = intercept.single(identifier, identifier.annotation);
if (result.isPresent()) {
return result;
}
}
BeanProvider<T> provider = (BeanProvider<T>) providers.get(key);
switch (provider.getLifetime()) {
case Transient:
return provider.get(key, this, targetContext);
case Singleton:
return DefaultBeanContext.bindBean(this, key, () -> provider.get(key, this, targetContext));
case Scoped:
case ScopedToChildren:
if (provider.getLifetime() == Lifetime.ScopedToChildren && targetContext == this) {
return Optional.empty();
}
return DefaultBeanContext.bindBean(targetContext, key, () -> provider.get(key, this, targetContext));
}
return Optional.empty();
})
.filter(Optional::isPresent)
.map(Optional::get)
.map(b -> (T) b);
}
@Override
public <T> Optional<T> findBean(Class<T> clazz, Qualifier qualifier) {
BeanKey<T> identifier = new BeanKey<>(clazz)
.qualifiedBy(qualifier);
return findBean(identifier);
}
@Override
public BeanContext getNestedContainer() {
return new DefaultBeanContext(this, environment);
}
@Override
public BeanContext getNestedContainer(ServiceRegistry... registries) {
return new DefaultBeanContext(this, environment, registries);
}
@Override
public BeanEnvironment getEnvironment() {
return environment;
}
/**
 * Closes every bound bean that implements {@link AutoCloseable}.
 *
 * FIX: the previous implementation rethrew the first failure immediately,
 * skipping the close of every remaining bean. Now all beans are attempted;
 * the first exception is rethrown afterwards with later failures attached
 * as suppressed exceptions, so each failure is still logged and visible.
 *
 * @throws Exception the first close failure encountered, if any
 */
@Override
public void close() throws Exception {
    Exception firstFailure = null;
    for (Object o : this.boundObjects.values()) {
        if (o instanceof AutoCloseable) {
            try {
                ((AutoCloseable) o).close();
            } catch (Exception e) {
                logger.error("Cannot close bean [" + o.getClass() + "]", e);
                if (firstFailure == null) {
                    firstFailure = e;
                } else {
                    firstFailure.addSuppressed(e);
                }
            }
        }
    }
    if (firstFailure != null) {
        throw firstFailure;
    }
}
public Optional<BeanContext> getParent() {
    // Root contexts have no parent; expose absence through an empty Optional.
    return parent == null ? Optional.empty() : Optional.of(parent);
}
}
| |
/*
* Copyright 2014 Soichiro Kashima
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package demo.binea.com.parallexheaderlayout;
import android.content.Context;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.util.SparseIntArray;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
/**
 * RecyclerView whose absolute vertical scroll position can be observed through
 * {@link ObservableScrollViewCallbacks}. Before using this, please consider the
 * RecyclerView.OnScrollListener provided by the support library officially.
 *
 * RecyclerView exposes no absolute scroll offset, so this class estimates one:
 * it caches the height of every child it has seen (keyed by adapter position in
 * {@code mChildrenHeights}) and approximates never-measured items with the first
 * visible child's height.
 */
public class ObservableRecyclerView extends RecyclerView implements Scrollable {
    // Assumed recyclerview library major version; lowered to 21 at runtime when
    // the v22 getChildAdapterPosition(View) API is missing (checkLibraryVersion()).
    private static int recyclerViewLibraryVersion = 22;

    // Fields that should be saved onSaveInstanceState
    private int mPrevFirstVisiblePosition;
    private int mPrevFirstVisibleChildHeight = -1;
    private int mPrevScrolledChildrenHeight;
    private int mPrevScrollY;
    private int mScrollY;
    private SparseIntArray mChildrenHeights;

    // Fields that don't need to be saved onSaveInstanceState
    private ObservableScrollViewCallbacks mCallbacks;
    private ScrollState mScrollState;
    private boolean mFirstScroll;
    private boolean mDragging;
    private boolean mIntercepted;
    private MotionEvent mPrevMoveEvent;
    private ViewGroup mTouchInterceptionViewGroup;

    public ObservableRecyclerView(Context context) {
        super(context);
        init();
    }

    public ObservableRecyclerView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public ObservableRecyclerView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init();
    }

    @Override
    public void onRestoreInstanceState(Parcelable state) {
        SavedState ss = (SavedState) state;
        mPrevFirstVisiblePosition = ss.prevFirstVisiblePosition;
        mPrevFirstVisibleChildHeight = ss.prevFirstVisibleChildHeight;
        mPrevScrolledChildrenHeight = ss.prevScrolledChildrenHeight;
        mPrevScrollY = ss.prevScrollY;
        mScrollY = ss.scrollY;
        mChildrenHeights = ss.childrenHeights;
        super.onRestoreInstanceState(ss.getSuperState());
    }

    @Override
    public Parcelable onSaveInstanceState() {
        Parcelable superState = super.onSaveInstanceState();
        SavedState ss = new SavedState(superState);
        ss.prevFirstVisiblePosition = mPrevFirstVisiblePosition;
        ss.prevFirstVisibleChildHeight = mPrevFirstVisibleChildHeight;
        ss.prevScrolledChildrenHeight = mPrevScrolledChildrenHeight;
        ss.prevScrollY = mPrevScrollY;
        ss.scrollY = mScrollY;
        ss.childrenHeights = mChildrenHeights;
        return ss;
    }

    @Override
    protected void onScrollChanged(int l, int t, int oldl, int oldt) {
        super.onScrollChanged(l, t, oldl, oldt);
        if (mCallbacks != null) {
            if (getChildCount() > 0) {
                int firstVisiblePosition = getChildAdapterPosition(getChildAt(0));
                int lastVisiblePosition = getChildAdapterPosition(getChildAt(getChildCount() - 1));
                // Cache the real height of every visible child, keyed by adapter
                // position. FIX: previously the cache entry was overwritten with 0
                // whenever the height was unchanged (or the child was null), which
                // corrupted the skipped-children estimates computed below.
                for (int i = firstVisiblePosition, j = 0; i <= lastVisiblePosition; i++, j++) {
                    View child = getChildAt(j);
                    if (child != null
                            && (mChildrenHeights.indexOfKey(i) < 0 || child.getHeight() != mChildrenHeights.get(i))) {
                        mChildrenHeights.put(i, child.getHeight());
                    }
                }
                View firstVisibleChild = getChildAt(0);
                if (firstVisibleChild != null) {
                    if (mPrevFirstVisiblePosition < firstVisiblePosition) {
                        // scroll down
                        int skippedChildrenHeight = 0;
                        if (firstVisiblePosition - mPrevFirstVisiblePosition != 1) {
                            for (int i = firstVisiblePosition - 1; i > mPrevFirstVisiblePosition; i--) {
                                // FIX: 0 is a valid SparseIntArray index, so use >=
                                // instead of > to honor the first cached entry.
                                if (0 <= mChildrenHeights.indexOfKey(i)) {
                                    skippedChildrenHeight += mChildrenHeights.get(i);
                                } else {
                                    // Approximate each item's height to the first visible child.
                                    // It may be incorrect, but without this, scrollY will be broken
                                    // when scrolling from the bottom.
                                    skippedChildrenHeight += firstVisibleChild.getHeight();
                                }
                            }
                        }
                        mPrevScrolledChildrenHeight += mPrevFirstVisibleChildHeight + skippedChildrenHeight;
                        mPrevFirstVisibleChildHeight = firstVisibleChild.getHeight();
                    } else if (firstVisiblePosition < mPrevFirstVisiblePosition) {
                        // scroll up
                        int skippedChildrenHeight = 0;
                        if (mPrevFirstVisiblePosition - firstVisiblePosition != 1) {
                            for (int i = mPrevFirstVisiblePosition - 1; i > firstVisiblePosition; i--) {
                                // FIX: same >= comparison as the scroll-down branch.
                                if (0 <= mChildrenHeights.indexOfKey(i)) {
                                    skippedChildrenHeight += mChildrenHeights.get(i);
                                } else {
                                    // Approximate each item's height to the first visible child.
                                    // It may be incorrect, but without this, scrollY will be broken
                                    // when scrolling from the bottom.
                                    skippedChildrenHeight += firstVisibleChild.getHeight();
                                }
                            }
                        }
                        mPrevScrolledChildrenHeight -= firstVisibleChild.getHeight() + skippedChildrenHeight;
                        mPrevFirstVisibleChildHeight = firstVisibleChild.getHeight();
                    } else if (firstVisiblePosition == 0) {
                        // Back at the very top: reset the accumulated height.
                        mPrevFirstVisibleChildHeight = firstVisibleChild.getHeight();
                        mPrevScrolledChildrenHeight = 0;
                    }
                    if (mPrevFirstVisibleChildHeight < 0) {
                        mPrevFirstVisibleChildHeight = 0;
                    }
                    mScrollY = mPrevScrolledChildrenHeight - firstVisibleChild.getTop();
                    mPrevFirstVisiblePosition = firstVisiblePosition;
                    mCallbacks.onScrollChanged(mScrollY, mFirstScroll, mDragging);
                    if (mFirstScroll) {
                        mFirstScroll = false;
                    }
                    // ScrollState names the direction the content moves: a growing
                    // scrollY (list scrolled downwards) is reported as UP.
                    if (mPrevScrollY < mScrollY) {
                        mScrollState = ScrollState.UP;
                    } else if (mScrollY < mPrevScrollY) {
                        mScrollState = ScrollState.DOWN;
                    } else {
                        mScrollState = ScrollState.STOP;
                    }
                    mPrevScrollY = mScrollY;
                }
            }
        }
    }

    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        if (mCallbacks != null) {
            switch (ev.getActionMasked()) {
                case MotionEvent.ACTION_DOWN:
                    // Whether or not motion events are consumed by children,
                    // flag initializations which are related to ACTION_DOWN events should be executed.
                    // Because if the ACTION_DOWN is consumed by children and only ACTION_MOVEs are
                    // passed to parent (this view), the flags will be invalid.
                    // Also, applications might implement initialization codes to onDownMotionEvent,
                    // so call it here.
                    mFirstScroll = mDragging = true;
                    mCallbacks.onDownMotionEvent();
                    break;
            }
        }
        return super.onInterceptTouchEvent(ev);
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        if (mCallbacks != null) {
            switch (ev.getActionMasked()) {
                case MotionEvent.ACTION_UP:
                case MotionEvent.ACTION_CANCEL:
                    mIntercepted = false;
                    mDragging = false;
                    mCallbacks.onUpOrCancelMotionEvent(mScrollState);
                    break;
                case MotionEvent.ACTION_MOVE:
                    if (mPrevMoveEvent == null) {
                        mPrevMoveEvent = ev;
                    }
                    float diffY = ev.getY() - mPrevMoveEvent.getY();
                    mPrevMoveEvent = MotionEvent.obtainNoHistory(ev);
                    if (getCurrentScrollY() - diffY <= 0) {
                        // Can't scroll anymore.
                        if (mIntercepted) {
                            // Already dispatched ACTION_DOWN event to parents, so stop here.
                            return false;
                        }
                        // Apps can set the interception target other than the direct parent.
                        final ViewGroup parent;
                        if (mTouchInterceptionViewGroup == null) {
                            parent = (ViewGroup) getParent();
                        } else {
                            parent = mTouchInterceptionViewGroup;
                        }
                        // Get offset to parents. If the parent is not the direct parent,
                        // we should aggregate offsets from all of the parents.
                        float offsetX = 0;
                        float offsetY = 0;
                        for (View v = this; v != null && v != parent; v = (View) v.getParent()) {
                            offsetX += v.getLeft() - v.getScrollX();
                            offsetY += v.getTop() - v.getScrollY();
                        }
                        final MotionEvent event = MotionEvent.obtainNoHistory(ev);
                        event.offsetLocation(offsetX, offsetY);
                        if (parent.onInterceptTouchEvent(event)) {
                            mIntercepted = true;
                            // If the parent wants to intercept ACTION_MOVE events,
                            // we pass ACTION_DOWN event to the parent
                            // as if these touch events just have began now.
                            event.setAction(MotionEvent.ACTION_DOWN);
                            // Return this onTouchEvent() first and set ACTION_DOWN event for parent
                            // to the queue, to keep events sequence.
                            post(new Runnable() {
                                @Override
                                public void run() {
                                    parent.dispatchTouchEvent(event);
                                }
                            });
                            return false;
                        }
                        // Even when this can't be scrolled anymore,
                        // simply returning false here may cause subView's click,
                        // so delegate it to super.
                        return super.onTouchEvent(ev);
                    }
                    break;
            }
        }
        return super.onTouchEvent(ev);
    }

    @Override
    public void setScrollViewCallbacks(ObservableScrollViewCallbacks listener) {
        mCallbacks = listener;
    }

    @Override
    public void setTouchInterceptionViewGroup(ViewGroup viewGroup) {
        mTouchInterceptionViewGroup = viewGroup;
    }

    @Override
    public void scrollVerticallyTo(int y) {
        // Convert the pixel offset to an adapter position, assuming uniform
        // item heights equal to the first visible child's height.
        View firstVisibleChild = getChildAt(0);
        if (firstVisibleChild != null) {
            int baseHeight = firstVisibleChild.getHeight();
            // FIX: guard against division by zero when the first child has not
            // been laid out yet (height == 0).
            if (baseHeight > 0) {
                int position = y / baseHeight;
                scrollVerticallyToPosition(position);
            }
        }
    }

    /**
     * <p>Same as {@linkplain #scrollToPosition(int)} but it scrolls to the position not only make
     * the position visible.</p>
     * <p>It depends on {@code LayoutManager} how {@linkplain #scrollToPosition(int)} works,
     * and currently we know that {@linkplain LinearLayoutManager#scrollToPosition(int)} just
     * make the position visible.</p>
     * <p>In LinearLayoutManager, scrollToPositionWithOffset() is provided for scrolling to the position.
     * This method checks which LayoutManager is set,
     * and handles which method should be called for scrolling.</p>
     * <p>Other known classes (StaggeredGridLayoutManager and GridLayoutManager) are not tested.</p>
     *
     * @param position position to scroll
     */
    public void scrollVerticallyToPosition(int position) {
        LayoutManager lm = getLayoutManager();
        // instanceof already rejects null, so no explicit null check is needed.
        if (lm instanceof LinearLayoutManager) {
            ((LinearLayoutManager) lm).scrollToPositionWithOffset(position, 0);
        } else {
            scrollToPosition(position);
        }
    }

    @Override
    public int getCurrentScrollY() {
        return mScrollY;
    }

    /**
     * Version-tolerant wrapper: uses getChildAdapterPosition() on v22+ of the
     * support library and falls back to the deprecated getChildPosition() on v21.
     */
    @SuppressWarnings("deprecation")
    public int getChildAdapterPosition(View child) {
        if (22 <= recyclerViewLibraryVersion) {
            return super.getChildAdapterPosition(child);
        }
        return getChildPosition(child);
    }

    private void init() {
        mChildrenHeights = new SparseIntArray();
        checkLibraryVersion();
    }

    private void checkLibraryVersion() {
        // Probe for the v22 API; on v21 the method does not exist and the call
        // site fails with NoSuchMethodError at runtime.
        try {
            super.getChildAdapterPosition(null);
        } catch (NoSuchMethodError e) {
            recyclerViewLibraryVersion = 21;
        }
    }

    /**
     * This saved state class is a Parcelable and should not extend
     * {@link android.view.View.BaseSavedState} nor {@link android.view.AbsSavedState}
     * because its super class AbsSavedState's constructor
     * {@link android.view.AbsSavedState#AbsSavedState(Parcel)} currently passes null
     * as a class loader to read its superstate from Parcelable.
     * This causes {@link android.os.BadParcelableException} when restoring saved states.
     * <p/>
     * The super class "RecyclerView" is a part of the support library,
     * and restoring its saved state requires the class loader that loaded the RecyclerView.
     * It seems that the class loader is not required when restoring from RecyclerView itself,
     * but it is required when restoring from RecyclerView's subclasses.
     */
    static class SavedState implements Parcelable {
        public static final SavedState EMPTY_STATE = new SavedState() {
        };
        int prevFirstVisiblePosition;
        int prevFirstVisibleChildHeight = -1;
        int prevScrolledChildrenHeight;
        int prevScrollY;
        int scrollY;
        SparseIntArray childrenHeights;
        // This keeps the parent(RecyclerView)'s state
        Parcelable superState;

        /**
         * Called by EMPTY_STATE instantiation.
         */
        private SavedState() {
            superState = null;
        }

        /**
         * Called by onSaveInstanceState.
         */
        SavedState(Parcelable superState) {
            this.superState = superState != EMPTY_STATE ? superState : null;
        }

        /**
         * Called by CREATOR.
         */
        private SavedState(Parcel in) {
            // Parcel 'in' has its parent(RecyclerView)'s saved state.
            // To restore it, class loader that loaded RecyclerView is required.
            Parcelable superState = in.readParcelable(RecyclerView.class.getClassLoader());
            this.superState = superState != null ? superState : EMPTY_STATE;
            prevFirstVisiblePosition = in.readInt();
            prevFirstVisibleChildHeight = in.readInt();
            prevScrolledChildrenHeight = in.readInt();
            prevScrollY = in.readInt();
            scrollY = in.readInt();
            childrenHeights = new SparseIntArray();
            final int numOfChildren = in.readInt();
            if (0 < numOfChildren) {
                for (int i = 0; i < numOfChildren; i++) {
                    final int key = in.readInt();
                    final int value = in.readInt();
                    childrenHeights.put(key, value);
                }
            }
        }

        @Override
        public int describeContents() {
            return 0;
        }

        @Override
        public void writeToParcel(Parcel out, int flags) {
            out.writeParcelable(superState, flags);
            out.writeInt(prevFirstVisiblePosition);
            out.writeInt(prevFirstVisibleChildHeight);
            out.writeInt(prevScrolledChildrenHeight);
            out.writeInt(prevScrollY);
            out.writeInt(scrollY);
            final int numOfChildren = childrenHeights == null ? 0 : childrenHeights.size();
            out.writeInt(numOfChildren);
            if (0 < numOfChildren) {
                for (int i = 0; i < numOfChildren; i++) {
                    out.writeInt(childrenHeights.keyAt(i));
                    out.writeInt(childrenHeights.valueAt(i));
                }
            }
        }

        public Parcelable getSuperState() {
            return superState;
        }

        public static final Parcelable.Creator<SavedState> CREATOR
                = new Parcelable.Creator<SavedState>() {
            @Override
            public SavedState createFromParcel(Parcel in) {
                return new SavedState(in);
            }

            @Override
            public SavedState[] newArray(int size) {
                return new SavedState[size];
            }
        };
    }
}
| |
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apereo.portal.portlet.dao.trans;
import java.util.LinkedHashSet;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apereo.portal.IUserPreferencesManager;
import org.apereo.portal.layout.IUserLayoutManager;
import org.apereo.portal.layout.TransientUserLayoutManagerWrapper;
import org.apereo.portal.layout.node.IUserLayoutChannelDescription;
import org.apereo.portal.portlet.dao.IPortletEntityDao;
import org.apereo.portal.portlet.om.IPortletDefinition;
import org.apereo.portal.portlet.om.IPortletDefinitionId;
import org.apereo.portal.portlet.om.IPortletEntity;
import org.apereo.portal.portlet.om.IPortletEntityId;
import org.apereo.portal.portlet.registry.IPortletDefinitionRegistry;
import org.apereo.portal.url.IPortalRequestUtils;
import org.apereo.portal.user.IUserInstance;
import org.apereo.portal.user.IUserInstanceManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Repository;
/**
 * Handles entity management for transient portlets, defined as portlets that aren't permanent parts of the user's
 * layout. Portlet preferences are still persisted but are associated with a transient rendering of the portlet.
 * Transient portlets are detected by checking the channel subscribe ID against the
 * {@link TransientUserLayoutManagerWrapper#SUBSCRIBE_PREFIX} prefix; all other entities pass straight through to
 * the delegate DAO.
 *
 * @author Eric Dalquist
 * @version $Revision$
 */
@Repository
@Qualifier("transient")
public class TransientPortletEntityDao implements IPortletEntityDao {
    protected final Log logger = LogFactory.getLog(this.getClass());

    // Registry for looking up data related to portlet definitions
    private IPortletDefinitionRegistry portletDefinitionRegistry;
    // The IPortletEntityDao that performs the actual persistence
    private IPortletEntityDao delegatePortletEntityDao;
    // Used to get access to the user's layout manager
    private IUserInstanceManager userInstanceManager;
    // Used to get access to the current portal request
    private IPortalRequestUtils portalRequestUtils;

    /**
     * The IPortletEntityDao to delegate calls to for actual persistence
     */
    @Autowired
    public void setDelegatePortletEntityDao(@Qualifier("persistence") IPortletEntityDao delegatePortletEntityDao) {
        this.delegatePortletEntityDao = delegatePortletEntityDao;
    }

    /**
     * Registry for looking up data related to portlet definitions
     */
    @Autowired
    public void setPortletDefinitionRegistry(IPortletDefinitionRegistry portletDefinitionRegistry) {
        this.portletDefinitionRegistry = portletDefinitionRegistry;
    }

    /**
     * Used to get access to the user's layout manager
     */
    @Autowired
    public void setUserInstanceManager(IUserInstanceManager userInstanceManager) {
        this.userInstanceManager = userInstanceManager;
    }

    /**
     * Used to get access to the current portal request
     */
    @Autowired
    public void setPortalRequestUtils(IPortalRequestUtils portalRequestUtils) {
        this.portalRequestUtils = portalRequestUtils;
    }

    /**
     * Creates a portlet entity. For a transient layout node ID the entity is persisted under the definition-derived
     * persistent ID and returned wrapped in a {@link TransientPortletEntity} that still reports the transient ID.
     *
     * @see org.apereo.portal.portlet.dao.IPortletEntityDao#createPortletEntity(org.apereo.portal.portlet.om.IPortletDefinitionId, java.lang.String, int)
     */
    @Override
    public IPortletEntity createPortletEntity(IPortletDefinitionId portletDefinitionId, String layoutNodeId, int userId) {
        if (layoutNodeId.startsWith(TransientUserLayoutManagerWrapper.SUBSCRIBE_PREFIX)) {
            final String transientLayoutNodeId = layoutNodeId;
            // Persist under the stable, definition-derived node ID instead of the transient one.
            layoutNodeId = this.getPersistentLayoutNodeId(portletDefinitionId);
            final IPortletEntity portletEntity = this.delegatePortletEntityDao.createPortletEntity(portletDefinitionId, layoutNodeId, userId);
            return new TransientPortletEntity(portletEntity, transientLayoutNodeId);
        }
        return this.delegatePortletEntityDao.createPortletEntity(portletDefinitionId, layoutNodeId, userId);
    }

    /**
     * Deletes the entity, first unwrapping any transient wrapper so the delegate sees the persistent entity.
     *
     * @see org.apereo.portal.portlet.dao.IPortletEntityDao#deletePortletEntity(org.apereo.portal.portlet.om.IPortletEntity)
     */
    @Override
    public void deletePortletEntity(IPortletEntity portletEntity) {
        portletEntity = this.unwrapEntity(portletEntity);
        this.delegatePortletEntityDao.deletePortletEntity(portletEntity);
    }

    /**
     * Returns all entities for the definition, re-wrapping any transient entities for the current request.
     *
     * @see org.apereo.portal.portlet.dao.IPortletEntityDao#getPortletEntities(org.apereo.portal.portlet.om.IPortletDefinitionId)
     */
    @Override
    public Set<IPortletEntity> getPortletEntities(IPortletDefinitionId portletDefinitionId) {
        final Set<IPortletEntity> portletEntities = this.delegatePortletEntityDao.getPortletEntities(portletDefinitionId);
        return this.wrapEntities(portletEntities);
    }

    /**
     * Returns all entities for the user, re-wrapping any transient entities for the current request.
     *
     * @see org.apereo.portal.portlet.dao.IPortletEntityDao#getPortletEntitiesForUser(int)
     */
    @Override
    public Set<IPortletEntity> getPortletEntitiesForUser(int userId) {
        final Set<IPortletEntity> portletEntities = this.delegatePortletEntityDao.getPortletEntitiesForUser(userId);
        return this.wrapEntities(portletEntities);
    }

    /**
     * Looks up an entity by its ID, re-wrapping it when it turns out to be transient.
     *
     * @see org.apereo.portal.portlet.dao.IPortletEntityDao#getPortletEntity(org.apereo.portal.portlet.om.IPortletEntityId)
     */
    @Override
    public IPortletEntity getPortletEntity(IPortletEntityId portletEntityId) {
        final IPortletEntity portletEntity = this.delegatePortletEntityDao.getPortletEntity(portletEntityId);
        return this.wrapEntity(portletEntity);
    }

    // Pure pass-through; transient handling does not affect existence checks.
    @Override
    public boolean portletEntityExists(IPortletEntityId portletEntityId) {
        return this.delegatePortletEntityDao.portletEntityExists(portletEntityId);
    }

    /**
     * Looks up an entity by layout node ID. A transient node ID is first translated to the persistent,
     * definition-derived subscribe ID actually stored in the database.
     *
     * @see org.apereo.portal.portlet.dao.IPortletEntityDao#getPortletEntity(java.lang.String, int)
     */
    @Override
    public IPortletEntity getPortletEntity(String layoutNodeId, int userId) {
        if (layoutNodeId.startsWith(TransientUserLayoutManagerWrapper.SUBSCRIBE_PREFIX)) {
            final String databaseChannelSubscribeId = this.determineDatabaseChannelSubscribeId(layoutNodeId);
            final IPortletEntity portletEntity = this.delegatePortletEntityDao.getPortletEntity(databaseChannelSubscribeId, userId);
            return this.wrapEntity(portletEntity);
        }
        return this.delegatePortletEntityDao.getPortletEntity(layoutNodeId, userId);
    }

    /**
     * Updates the entity, first unwrapping any transient wrapper so the delegate sees the persistent entity.
     *
     * @see org.apereo.portal.portlet.dao.IPortletEntityDao#updatePortletEntity(org.apereo.portal.portlet.om.IPortletEntity)
     */
    @Override
    public void updatePortletEntity(IPortletEntity portletEntity) {
        portletEntity = this.unwrapEntity(portletEntity);
        this.delegatePortletEntityDao.updatePortletEntity(portletEntity);
    }

    /**
     * Returns the unwrapped entity if it is an instance of TransientPortletEntity. If not, the original entity is
     * returned.
     */
    protected IPortletEntity unwrapEntity(IPortletEntity portletEntity) {
        if (portletEntity instanceof TransientPortletEntity) {
            return ((TransientPortletEntity)portletEntity).getDelegatePortletEntity();
        }
        return portletEntity;
    }

    /**
     * Adds a TransientPortletEntity wrapper to the portletEntity if it is needed. If no wrapping is needed the
     * original entity is returned. When the entity is transient but no layout manager is available for the current
     * request, the unwrapped entity is returned as a fallback (with a warning).
     */
    protected IPortletEntity wrapEntity(IPortletEntity portletEntity) {
        if (portletEntity == null) {
            return null;
        }
        final String persistentLayoutNodeId = portletEntity.getLayoutNodeId();
        if (persistentLayoutNodeId.startsWith(TransientUserLayoutManagerWrapper.SUBSCRIBE_PREFIX)) {
            final IUserLayoutManager userLayoutManager = this.getUserLayoutManager();
            if (userLayoutManager == null) {
                this.logger.warn("Could not find IUserLayoutManager when trying to wrap transient portlet entity: " + portletEntity);
                return portletEntity;
            }
            // Recover the request-scoped transient subscribe ID via the portlet's fname.
            final IPortletDefinition portletDefinition = portletEntity.getPortletDefinition();
            final String fname = portletDefinition.getFName();
            final String layoutNodeId = userLayoutManager.getSubscribeId(fname);
            return new TransientPortletEntity(portletEntity, layoutNodeId);
        }
        return portletEntity;
    }

    /**
     * Calls {@link #wrapEntity(IPortletEntity)} on each entry in the Set; wrap calls that return null result in the
     * entity being dropped from the returned Set.
     */
    protected Set<IPortletEntity> wrapEntities(Set<IPortletEntity> portletEntities) {
        final Set<IPortletEntity> wrappedPortletEntities = new LinkedHashSet<IPortletEntity>(portletEntities.size());
        for (final IPortletEntity portletEntity : portletEntities) {
            final IPortletEntity wrappedEntity = this.wrapEntity(portletEntity);
            if (wrappedEntity != null) {
                wrappedPortletEntities.add(wrappedEntity);
            }
        }
        return wrappedPortletEntities;
    }

    /**
     * Translates a transient layout node ID into the persistent subscribe ID stored in the database, by resolving
     * the node in the user's layout and deriving the ID from its portlet definition.
     */
    protected String determineDatabaseChannelSubscribeId(String layoutNodeId) {
        //Find the referenced Node in the user's layout
        final IUserLayoutManager userLayoutManager = this.getUserLayoutManager();
        final IUserLayoutChannelDescription channelNode = (IUserLayoutChannelDescription)userLayoutManager.getNode(layoutNodeId);

        //Lookup the IPortletDefinition for the node
        final String portletPublishId = channelNode.getChannelPublishId();
        final IPortletDefinition portletDefinition = this.portletDefinitionRegistry.getPortletDefinition(portletPublishId);

        //Generate the subscribe ID used for the database
        return this.getPersistentLayoutNodeId(portletDefinition.getPortletDefinitionId());
    }

    /**
     * Builds the definition-derived persistent node ID: SUBSCRIBE_PREFIX + "." + definition ID.
     */
    protected String getPersistentLayoutNodeId(IPortletDefinitionId portletDefinitionId) {
        return TransientUserLayoutManagerWrapper.SUBSCRIBE_PREFIX + "." + portletDefinitionId.getStringId();
    }

    /**
     * Resolves the layout manager for the user bound to the current portal request.
     */
    protected IUserLayoutManager getUserLayoutManager() {
        final HttpServletRequest portalRequest = this.portalRequestUtils.getCurrentPortalRequest();
        final IUserInstance userInstance = this.userInstanceManager.getUserInstance(portalRequest);
        final IUserPreferencesManager preferencesManager = userInstance.getPreferencesManager();
        return preferencesManager.getUserLayoutManager();
    }
}
| |
package com.dianping.cat.service;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import org.unidal.dal.jdbc.DalException;
import org.unidal.lookup.annotation.Inject;
import org.unidal.lookup.extension.Initializable;
import org.unidal.lookup.extension.InitializationException;
import com.dianping.cat.Cat;
import com.dianping.cat.config.server.ServerConfigManager;
import com.dianping.cat.core.dal.Project;
import com.dianping.cat.core.dal.ProjectDao;
import com.dianping.cat.core.dal.ProjectEntity;
/**
 * In-memory cache of CAT projects backed by {@link ProjectDao}. Loaded eagerly
 * at startup (unless running in local mode) and refreshed wholesale via
 * {@link #refresh()}; individual mutations keep the caches in sync best-effort.
 */
public class ProjectService implements Initializable {
    @Inject
    private ProjectDao m_projectDao;

    @Inject
    private ServerConfigManager m_manager;

    // Known domain names; used as a concurrent set (key == value == domain).
    private ConcurrentHashMap<String, String> m_domains = new ConcurrentHashMap<String, String>();

    // Cache of projects keyed by domain name.
    private ConcurrentHashMap<String, Project> m_domainToProjects = new ConcurrentHashMap<String, Project>();

    // Cache of projects keyed by CMDB domain name.
    private ConcurrentHashMap<String, Project> m_cmdbToProjects = new ConcurrentHashMap<String, Project>();

    public static final String DEFAULT = "Default";

    /** Returns true when the domain is already known to this service. */
    public boolean contains(String domain) {
        return m_domains.containsKey(domain);
    }

    /** Creates a new, not-yet-persisted Project data object. */
    public Project create() {
        return m_projectDao.createLocal();
    }

    /**
     * Deletes the project from the database and evicts it from both caches.
     *
     * @return true on success, false when the database delete fails
     */
    public boolean delete(Project project) {
        int id = project.getId();
        String domainName = null;
        for (Entry<String, Project> entry : m_domainToProjects.entrySet()) {
            Project pro = entry.getValue();
            if (pro.getId() == id) {
                domainName = pro.getDomain();
                break;
            }
        }
        try {
            m_projectDao.deleteByPK(project);
            // FIX: ConcurrentHashMap.remove(null) throws NullPointerException,
            // so guard against a project missing from the cache and against a
            // null CMDB domain.
            if (domainName != null) {
                m_domainToProjects.remove(domainName);
            }
            String cmdbDomain = project.getCmdbDomain();
            if (cmdbDomain != null) {
                m_cmdbToProjects.remove(cmdbDomain);
            }
            return true;
        } catch (Exception e) {
            Cat.logError("delete project error ", e);
            return false;
        }
    }

    /** Returns a snapshot list of all cached projects. */
    public List<Project> findAll() throws DalException {
        return new ArrayList<Project>(m_domainToProjects.values());
    }

    /**
     * Looks up a project by domain, falling back to the database on a cache
     * miss and caching the fetched row.
     *
     * @return the project, or null when it does not exist
     */
    public Project findByDomain(String domainName) {
        Project project = m_domainToProjects.get(domainName);
        if (project != null) {
            return project;
        }
        try {
            Project pro = m_projectDao.findByDomain(domainName, ProjectEntity.READSET_FULL);
            m_domainToProjects.put(pro.getDomain(), pro);
            // FIX: previously returned the stale local 'project' (always null on
            // this path), discarding the successful database lookup.
            return pro;
        } catch (DalException e) {
            // Row not found: deliberately treated as absent, no logging.
        } catch (Exception e) {
            Cat.logError(e);
        }
        return null;
    }

    /**
     * Groups the given domains into departments (by BU) and project lines
     * (by CMDB product line); unknown values fall back to {@link #DEFAULT}.
     */
    public Map<String, Department> findDepartments(Collection<String> domains) {
        Map<String, Department> departments = new TreeMap<String, Department>();
        for (String domain : domains) {
            Project project = findProject(domain);
            String department = DEFAULT;
            String projectLine = DEFAULT;
            if (project != null) {
                String bu = project.getBu();
                String productline = project.getCmdbProductline();
                department = bu == null ? DEFAULT : bu;
                projectLine = productline == null ? DEFAULT : productline;
            }
            Department temp = departments.get(department);
            if (temp == null) {
                temp = new Department();
                departments.put(department, temp);
            }
            temp.findOrCreatProjectLine(projectLine).addDomain(domain);
        }
        return departments;
    }

    /** Cache-only lookup: tries the domain cache first, then the CMDB cache. */
    public Project findProject(String domain) {
        Project project = m_domainToProjects.get(domain);
        if (project == null) {
            project = m_cmdbToProjects.get(domain);
        }
        return project;
    }

    @Override
    public void initialize() throws InitializationException {
        // Local-mode deployments have no database to preload from.
        if (!m_manager.isLocalMode()) {
            refresh();
        }
    }

    /**
     * Inserts the project into the database and caches it by domain.
     *
     * @return true when exactly one row was inserted
     */
    public boolean insert(Project project) throws DalException {
        m_domainToProjects.put(project.getDomain(), project);
        return m_projectDao.insert(project) == 1;
    }

    /** Convenience insert for a bare domain with DEFAULT BU/product line. */
    public boolean insert(String domain) {
        Project project = create();
        project.setDomain(domain);
        project.setCmdbProductline(DEFAULT);
        project.setBu(DEFAULT);
        try {
            insert(project);
            m_domains.put(domain, domain);
            return true;
        } catch (Exception ex) {
            Cat.logError(ex);
        }
        return false;
    }

    /**
     * Reloads all projects from the database into fresh maps and swaps them in
     * atomically per-field, so readers never see a half-built cache.
     */
    protected void refresh() {
        try {
            List<Project> projects = m_projectDao.findAll(ProjectEntity.READSET_FULL);
            ConcurrentHashMap<String, Project> tmpDomainProjects = new ConcurrentHashMap<String, Project>();
            ConcurrentHashMap<String, Project> tmpCmdbProjects = new ConcurrentHashMap<String, Project>();
            ConcurrentHashMap<String, String> tmpDomains = new ConcurrentHashMap<String, String>();
            for (Project project : projects) {
                String domain = project.getDomain();
                tmpDomains.put(domain, domain);
                tmpDomainProjects.put(domain, project);
                String cmdb = project.getCmdbDomain();
                if (cmdb != null) {
                    tmpCmdbProjects.put(cmdb, project);
                }
            }
            m_domains = tmpDomains;
            m_domainToProjects = tmpDomainProjects;
            m_cmdbToProjects = tmpCmdbProjects;
        } catch (DalException e) {
            Cat.logError("initialize ProjectService error", e);
        }
    }

    /**
     * Updates the project in the database and refreshes the domain cache entry.
     *
     * @return true on success, false when the database update fails
     */
    public boolean update(Project project) {
        m_domainToProjects.put(project.getDomain(), project);
        try {
            m_projectDao.updateByPK(project, ProjectEntity.UPDATESET_FULL);
            return true;
        } catch (DalException e) {
            Cat.logError(e);
            return false;
        }
    }

    /** A BU-level grouping of project lines, sorted by name. */
    public static class Department {
        private Map<String, ProjectLine> m_projectLines = new TreeMap<String, ProjectLine>();

        public ProjectLine findOrCreatProjectLine(String projectLine) {
            // FIX: look up with the same key used for insertion (previously the
            // lookup used String.valueOf(...) while the insert used the raw key,
            // which disagreed for null input).
            ProjectLine line = m_projectLines.get(projectLine);
            if (line == null) {
                line = new ProjectLine();
                m_projectLines.put(projectLine, line);
            }
            return line;
        }

        public Map<String, ProjectLine> getProjectLines() {
            return m_projectLines;
        }
    }

    /** A product-line-level grouping of domain names. */
    public static class ProjectLine {
        private List<String> m_lineDomains = new ArrayList<String>();

        public void addDomain(String name) {
            m_lineDomains.add(name);
        }

        public List<String> getLineDomains() {
            return m_lineDomains;
        }
    }
}
| |
/*
* Abora-Gold
* Part of the Abora hypertext project: http://www.abora.org
* Copyright 2003, 2005 David G Jones
*
* Translated from Udanax-Gold source code: http://www.udanax.com
* Copyright 1979-1999 Udanax.com. All rights reserved
*/
package info.dgjones.abora.gold.nkernel;
import info.dgjones.abora.gold.be.basic.BeGrandMap;
import info.dgjones.abora.gold.be.basic.ID;
import info.dgjones.abora.gold.cobbler.Cookbook;
import info.dgjones.abora.gold.collection.basic.PrimIntArray;
import info.dgjones.abora.gold.collection.basic.UInt8Array;
import info.dgjones.abora.gold.collection.steppers.Stepper;
import info.dgjones.abora.gold.collection.steppers.TableStepper;
import info.dgjones.abora.gold.fm.support.Thunk;
import info.dgjones.abora.gold.id.IDRegion;
import info.dgjones.abora.gold.java.AboraSupport;
import info.dgjones.abora.gold.java.exception.PasseException;
import info.dgjones.abora.gold.java.missing.PackOBits;
import info.dgjones.abora.gold.java.missing.smalltalk.Set;
import info.dgjones.abora.gold.nadmin.FeLockSmith;
import info.dgjones.abora.gold.nadmin.FeSession;
import info.dgjones.abora.gold.nkernel.FeAdminer;
import info.dgjones.abora.gold.nkernel.FeKeyMaster;
import info.dgjones.abora.gold.rcmain.ServerLoop;
import info.dgjones.abora.gold.snarf.DiskManager;
import info.dgjones.abora.gold.tumbler.Sequence;
import info.dgjones.abora.gold.xcvr.Rcvr;
import info.dgjones.abora.gold.xcvr.TextyXcvrMaker;
import info.dgjones.abora.gold.xcvr.TransferSpecialist;
import info.dgjones.abora.gold.xcvr.XnReadStream;
import info.dgjones.abora.gold.xpp.basic.Heaper;
/**
* A client interface for system administration operations. This object can only be obtained
* using a KeyMaster that has System Admin authority.
*/
public class FeAdminer extends Heaper {
protected FeKeyMaster myAdminKM;
/*
udanax-top.st:19045:
Heaper subclass: #FeAdminer
instanceVariableNames: 'myAdminKM {FeKeyMaster}'
classVariableNames: ''
poolDictionaries: ''
category: 'Xanadu-nkernel'!
*/
/*
udanax-top.st:19049:
FeAdminer comment:
'A client interface for system administration operations. This object can only be obtained using a KeyMaster that has System Admin authority. '!
*/
/*
udanax-top.st:19051:
(FeAdminer getOrMakeCxxClassDescription)
attributes: ((Set new) add: #CONCRETE; add: #ON.CLIENT; add: #EQ; yourself)!
*/
/*
udanax-top.st:19173:
FeAdminer class
instanceVariableNames: ''!
*/
/*
udanax-top.st:19176:
(FeAdminer getOrMakeCxxClassDescription)
attributes: ((Set new) add: #CONCRETE; add: #ON.CLIENT; add: #EQ; yourself)!
*/
/**
 * Registers this class's Abora attributes (CONCRETE, ONCLIENT, EQ) with the
 * class-description registry; invoked once during system initialization.
 * NOTE(review): the Smalltalk source declares #ON.CLIENT; the transformer
 * emits "ONCLIENT" (dot stripped) — presumably intentional, confirm against
 * other transformed classes.
 */
public static void initializeClassAttributes() {
    AboraSupport.findAboraClass(FeAdminer.class).setAttributes( new Set().add("CONCRETE").add("ONCLIENT").add("EQ"));
    /*
    Generated during transformation: AddMethod
    */
}
/**
 * Essential. Enable or disable the ability of the Server to accept
 * communications connections from client machines. Anyone who already holds
 * a GateKeeper or Server object stays connected; only the handing out of new
 * such objects is affected.
 *
 * @param open true to start accepting new client connections, false to stop
 */
public void acceptConnections(boolean open) {
    BeGrandMap grandMap = (BeGrandMap) CurrentGrandMap.fluidGet();
    grandMap.acceptConnections(open);
    /*
    udanax-top.st:19056:FeAdminer methodsFor: 'administrivia'!
    {void CLIENT} acceptConnections: open {BooleanVar}
    "Essential. Enable or disable the ability of the Server to accept communications connections from client machines. Anyone who has received a GateKeeper or Server object will continue to stay connected, but no new such objects will be handed out"
    CurrentGrandMap fluidGet acceptConnections: open!
    */
}
/**
 * Essential. Return a stepper over all currently active sessions.
 *
 * @return a Stepper of FeSession instances
 */
public Stepper activeSessions() {
    Stepper sessions = FeSession.allActive();
    return sessions;
    /*
    udanax-top.st:19061:FeAdminer methodsFor: 'administrivia'!
    {Stepper CLIENT of: FeSession} activeSessions
    "Essential. Return a list of all active sessions."
    ^FeSession allActive!
    */
}
/**
 * Essential. Execute a sequence of server configuration commands.
 * The commands are decoded from a texty transfer stream; every received
 * Thunk is executed in order, other heapers are ignored.
 *
 * @param commands encoded command stream; known bug: only UInt8Arrays are
 *                 accepted (the argument is cast unconditionally)
 */
public void execute(PrimIntArray commands) {
    Someone.knownBug();
    /* only accepts UInt8Arrays */
    Rcvr rc = TextyXcvrMaker.make().makeRcvr((TransferSpecialist.make((Cookbook.makeString("boot")))), (XnReadStream.make(((UInt8Array) commands))));
    // Drain the receiver: execute each Thunk, skip everything else.
    for (Heaper next = rc.receiveHeaper(); next != null; next = rc.receiveHeaper()) {
        if (next instanceof Thunk) {
            ((Thunk) next).execute();
        }
    }
    // NOTE(review): rc is not destroyed if a thunk throws — matches the
    // original Smalltalk behavior; confirm before adding try/finally.
    rc.destroy();
    /*
    udanax-top.st:19066:FeAdminer methodsFor: 'administrivia'!
    {void CLIENT} execute: commands {PrimIntArray}
    "Essential. Execute a sequence of server configuration commands."
    | rc {Rcvr} next {Heaper | NULL} |
    self knownBug. "only accepts UInt8Arrays"
    rc := TextyXcvrMaker make
    makeRcvr: (TransferSpecialist make: (Cookbook make.String: 'boot'))
    with: (XnReadStream make: (commands cast: UInt8Array)).
    next := rc receiveHeaper.
    [next ~~ NULL] whileTrue:
    [next cast: Thunk into: [:thunk | thunk execute] others: [].
    next := rc receiveHeaper].
    rc destroy!
    */
}
/**
 * Essential. Grant a Club the authority to assign global IDs on this Server.
 *
 * @param clubID    the Club receiving the authority
 * @param globalIDs the region of the global ID space being granted
 */
public void grant(ID clubID, IDRegion globalIDs) {
    BeGrandMap grandMap = (BeGrandMap) CurrentGrandMap.fluidGet();
    grandMap.grant(clubID, globalIDs);
    /*
    udanax-top.st:19080:FeAdminer methodsFor: 'administrivia'!
    {void CLIENT} grant: clubID {ID} with: globalIDs {IDRegion}
    "Essential. Grant a Club the authority to assign global IDs on this Server."
    CurrentGrandMap fluidGet grant: clubID with: globalIDs!
    */
}
/**
 * Essential. List who has been granted authority to various regions of the
 * global IDSpace on this Server.
 *
 * @param clubIDs   filter on club IDs, or null for all clubs
 * @param globalIDs filter on granted regions, or null for the whole space
 * @return a TableStepper of ID to IDRegion
 */
public TableStepper grants(IDRegion clubIDs, IDRegion globalIDs) {
    BeGrandMap grandMap = (BeGrandMap) CurrentGrandMap.fluidGet();
    return grandMap.grants(clubIDs, globalIDs);
    /*
    udanax-top.st:19085:FeAdminer methodsFor: 'administrivia'!
    {TableStepper CLIENT of: ID and: IDRegion} grants: clubIDs {IDRegion default: NULL}
    with: globalIDs {IDRegion default: NULL}
    "Essential. List who has been granted authority to various regions of the global IDSpace on this Server."
    ^CurrentGrandMap fluidGet grants: clubIDs with: globalIDs!
    */
}
/**
 * Essential. Whether the Server is currently accepting communications
 * connections from client machines.
 *
 * @return true if new client connections are being accepted
 */
public boolean isAcceptingConnections() {
    BeGrandMap grandMap = (BeGrandMap) CurrentGrandMap.fluidGet();
    return grandMap.isAcceptingConnections();
    /*
    udanax-top.st:19091:FeAdminer methodsFor: 'administrivia'!
    {BooleanVar CLIENT} isAcceptingConnections
    "Essential. Whether the Server is accepting communications connections from client machines. "
    ^CurrentGrandMap fluidGet isAcceptingConnections!
    */
}
/**
 * Essential. Shutdown the Server immediately: flush all pending changes to
 * disk, then schedule termination of the server loop (which takes down all
 * connections).
 */
public void shutdown() {
    DiskManager packer = (DiskManager) CurrentPacker.fluidFetch();
    packer.purge();
    ServerLoop.scheduleTermination();
    /*
    udanax-top.st:19096:FeAdminer methodsFor: 'administrivia'!
    {void CLIENT} shutdown
    "Essential. Shutdown the Server immediately, taking down all the connections and writing all current changes to disk."
    [DiskManager] USES.
    CurrentPacker fluidFetch purge.
    ServerLoop scheduleTermination.!
    */
}
/**
 * @deprecated retired ("passe") Smalltalk protocol member; always throws
 * {@link PasseException}.
 */
public void clearProfile() {
    throw new PasseException();
    /*
    udanax-top.st:19105:FeAdminer methodsFor: 'smalltalk: passe'!
    {void} clearProfile
    self passe "rc file"!
    */
}
/**
 * @deprecated retired ("passe") Smalltalk protocol member; always throws
 * {@link PasseException}.
 */
public void consistencyCheck() {
    throw new PasseException();
    /*
    udanax-top.st:19109:FeAdminer methodsFor: 'smalltalk: passe'!
    {void} consistencyCheck
    self passe "rc file"!
    */
}
/**
 * @deprecated retired ("passe") Smalltalk protocol member; always throws
 * {@link PasseException}.
 */
public FeLockSmith defaultLockSmith() {
    throw new PasseException();
    /*
    udanax-top.st:19113:FeAdminer methodsFor: 'smalltalk: passe'!
    {FeLockSmith} defaultLockSmith
    self passe!
    */
}
/**
 * Disable login access to a Club, by revoking its direct membership of the
 * System Access Club.
 *
 * @deprecated retired ("passe"); the Smalltalk source points to FeServer as
 * the replacement. Always throws {@link PasseException}.
 */
public void disableAccess(ID clubID) {
    throw new PasseException();
    /*
    udanax-top.st:19117:FeAdminer methodsFor: 'smalltalk: passe'!
    {void} disableAccess: clubID {ID}
    "Disable login access to a Club, by revoking its direct membership of the System Access Club"
    self passe. "see FeServer"!
    */
}
/**
 * @deprecated retired ("passe"); see FeServer for the replacement. Always
 * throws {@link PasseException}.
 */
public void enableAccess(ID clubID) {
    throw new PasseException();
    /*
    udanax-top.st:19122:FeAdminer methodsFor: 'smalltalk: passe'!
    {void} enableAccess: clubID {ID}
    self passe. "see FeServer"!
    */
}
/**
 * @deprecated retired ("passe"); see FeServer for the replacement. Always
 * throws {@link PasseException}.
 */
public void nameClub(Sequence name, ID clubID) {
    throw new PasseException();
    /*
    udanax-top.st:19125:FeAdminer methodsFor: 'smalltalk: passe'!
    {void} nameClub: name {Sequence} with: clubID {ID}
    self passe. "see FeServer"!
    */
}
/**
 * @deprecated retired ("passe"); see FeServer for the replacement. Always
 * throws {@link PasseException}.
 */
public void renameClub(PackOBits oldName, PackOBits newName) {
    throw new PasseException();
    /*
    udanax-top.st:19128:FeAdminer methodsFor: 'smalltalk: passe'!
    {void} renameClub: oldName {PackOBits} with: newName {PackOBits}
    self passe. "see FeServer"!
    */
}
/**
 * @deprecated retired ("passe") Smalltalk protocol member; always throws
 * {@link PasseException}.
 */
public void setDefaultLockSmith(FeLockSmith lockSmith) {
    throw new PasseException();
    /*
    udanax-top.st:19131:FeAdminer methodsFor: 'smalltalk: passe'!
    {void} setDefaultLockSmith: lockSmith {FeLockSmith}
    self passe!
    */
}
/**
 * @deprecated retired ("passe") spelling; superseded by {@link #shutdown()}
 * (lowercase d) per the Smalltalk note. Always throws {@link PasseException}.
 */
public void shutDown() {
    throw new PasseException();
    /*
    udanax-top.st:19135:FeAdminer methodsFor: 'smalltalk: passe'!
    {void} shutDown
    self passe "shutdown"!
    */
}
/**
 * @deprecated retired ("passe"); see FeServer for the replacement. Always
 * throws {@link PasseException}.
 */
public void unnameClub(PackOBits name) {
    throw new PasseException();
    /*
    udanax-top.st:19139:FeAdminer methodsFor: 'smalltalk: passe'!
    {void} unnameClub: name {PackOBits}
    self passe. "see FeServer"!
    */
}
/**
 * @deprecated retired ("passe") Smalltalk protocol member; always throws
 * {@link PasseException}.
 */
public void writeProfile() {
    throw new PasseException();
    /*
    udanax-top.st:19142:FeAdminer methodsFor: 'smalltalk: passe'!
    {void} writeProfile
    self passe "rc file"!
    */
}
/**
 * Essential. The LockSmith which hands out locks when a client tries to
 * login through the GateKeeper with an invalid Club ID or name.
 *
 * @return the gate LockSmith, wrapped from its stored Edition
 */
public FeLockSmith gateLockSmith() {
    BeGrandMap grandMap = (BeGrandMap) CurrentGrandMap.fluidGet();
    return (FeLockSmith) (FeLockSmith.spec().wrap(grandMap.gateLockSmithEdition()));
    /*
    udanax-top.st:19148:FeAdminer methodsFor: 'security'!
    {FeLockSmith CLIENT} gateLockSmith
    "Essential. The LockSmith which hands out locks when a client tries to login through the GateKeeper with an invalid Club ID or name."
    [BeGrandMap] USES.
    ^(FeLockSmith spec wrap: CurrentGrandMap fluidGet gateLockSmithEdition) cast: FeLockSmith!
    */
}
/**
 * Essential. Set the LockSmith which creates locks to hand out when a client
 * tries to login with an invalid Club ID or name through the GateKeeper.
 *
 * @param lockSmith the new gate LockSmith; its Edition is what gets stored
 */
public void setGateLockSmith(FeLockSmith lockSmith) {
    // Note: uses fluidFetch here (fluidGet elsewhere), matching the Smalltalk.
    BeGrandMap grandMap = (BeGrandMap) CurrentGrandMap.fluidFetch();
    grandMap.setGateLockSmithEdition(lockSmith.edition());
    /*
    udanax-top.st:19153:FeAdminer methodsFor: 'security'!
    {void CLIENT} setGateLockSmith: lockSmith {FeLockSmith}
    "Essential. Set the LockSmith which creates locks to hand out when a client tries to login with an invalid Club ID or name through the GateKeeper."
    [BeGrandMap] USES.
    CurrentGrandMap fluidFetch setGateLockSmithEdition: lockSmith edition!
    */
}
/**
 * Smalltalk default-argument form: equivalent to {@code grants(null, null)},
 * i.e. list all grants for all clubs over the whole global ID space.
 */
public TableStepper grants() {
    return grants(null, null);
    /*
    udanax-top.st:19160:FeAdminer methodsFor: 'smalltalk: defaults'!
    {TableStepper CLIENT of: ID and: IDRegion} grants
    ^self grants: NULL with: NULL!
    */
}
/**
 * Smalltalk default-argument form: equivalent to
 * {@code grants(clubIDs, null)} — no filter on the granted regions.
 */
public TableStepper grants(IDRegion clubIDs) {
    return grants(clubIDs, null);
    /*
    udanax-top.st:19163:FeAdminer methodsFor: 'smalltalk: defaults'!
    {TableStepper CLIENT of: ID and: IDRegion} grants: clubIDs {IDRegion default: NULL}
    ^self grants: clubIDs with: NULL!
    */
}
/**
 * Identity-based hash: FeAdminer uses EQ semantics (see the class
 * attributes), so the hash is derived from the object's oop.
 */
public int actualHashForEqual() {
    return asOop();
    /*
    udanax-top.st:19168:FeAdminer methodsFor: 'generated:'!
    actualHashForEqual ^self asOop!
    */
}
/**
 * Identity comparison (EQ semantics), consistent with
 * {@link #actualHashForEqual()}.
 */
public boolean isEqual(Heaper other) {
    return this == other;
    /*
    udanax-top.st:19170:FeAdminer methodsFor: 'generated:'!
    isEqual: other ^self == other!
    */
}
/**
 * Essential. Create an Adminer, first verifying (via
 * {@code FeKeyMaster.assertAdminAuthority()}) that the current KeyMaster
 * carries System Admin authority.
 *
 * @return a new FeAdminer
 */
public static FeAdminer make() {
    FeKeyMaster.assertAdminAuthority();
    FeAdminer adminer = new FeAdminer();
    return adminer;
    /*
    udanax-top.st:19181:FeAdminer class methodsFor: 'create'!
    {FeAdminer CLIENT} make
    FeKeyMaster assertAdminAuthority.
    ^self create!
    */
}
/**
 * Transformed Smalltalk info.stProtocol declaration — a no-op in Java; the
 * protocol listing survives only as documentation:
 *
 * {void CLIENT} acceptConnections: open {BooleanVar}
 * {Stepper CLIENT of: FeSession} activeSessions
 * {void CLIENT} execute: commands {PrimIntegerArray}
 * {FeLockSmith CLIENT} gateLockSmith
 * {void CLIENT} grant: clubID {ID} with: globalIDs {IDRegion}
 * {TableStepper CLIENT of: ID and: IDRegion} grants
 * {TableStepper CLIENT of: ID and: IDRegion} grants: clubIDs {IDRegion default: NULL}
 * {TableStepper CLIENT of: ID and: IDRegion} grants: clubIDs {IDRegion default: NULL} with:
 * globalIDs {IDRegion default: NULL}
 * {BooleanVar CLIENT} isAcceptingConnections
 * {void CLIENT} setGateLockSmith: lockSmith {FeLockSmith}
 * {void CLIENT} shutDown
 */
public static void infostProtocol() {
    /*
    udanax-top.st:19188:FeAdminer class methodsFor: 'smalltalk: system'!
    info.stProtocol
    "{void CLIENT} acceptConnections: open {BooleanVar}
    {Stepper CLIENT of: FeSession} activeSessions
    {void CLIENT} execute: commands {PrimIntegerArray}
    {FeLockSmith CLIENT} gateLockSmith
    {void CLIENT} grant: clubID {ID} with: globalIDs {IDRegion}
    {TableStepper CLIENT of: ID and: IDRegion} grants
    {TableStepper CLIENT of: ID and: IDRegion} grants: clubIDs {IDRegion default: NULL}
    {TableStepper CLIENT of: ID and: IDRegion} grants: clubIDs {IDRegion default: NULL} with: globalIDs {IDRegion default: NULL}
    {BooleanVar CLIENT} isAcceptingConnections
    {void CLIENT} setGateLockSmith: lockSmith {FeLockSmith}
    {void CLIENT} shutDown
    "!
    */
}
/**
 * Default constructor. Callers should normally go through {@link #make()},
 * which checks admin authority before constructing.
 */
public FeAdminer() {
    /*
    Generated during transformation
    */
}
/**
 * Unmarshalling constructor: rebuild an instance from a receive stream.
 */
public FeAdminer(Rcvr receiver) {
    super(receiver);
    /*
    Generated during transformation
    */
}
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
class OFBsnImageDescStatsRequestVer14 implements OFBsnImageDescStatsRequest {
private static final Logger logger = LoggerFactory.getLogger(OFBsnImageDescStatsRequestVer14.class);
// version: 1.4
final static byte WIRE_VERSION = 5;      // OpenFlow wire version byte for OF 1.4
final static int LENGTH = 24;            // fixed total message length in bytes

private final static long DEFAULT_XID = 0x0L;
private final static Set<OFStatsRequestFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsRequestFlags>of();

// OF message fields (the only two non-fixed fields of this message)
private final long xid;
private final Set<OFStatsRequestFlags> flags;
//
// Immutable default instance
final static OFBsnImageDescStatsRequestVer14 DEFAULT = new OFBsnImageDescStatsRequestVer14(
    DEFAULT_XID, DEFAULT_FLAGS
);
// package private constructor - used by readers, builders, and factory.
// flags is null-checked; xid is unconstrained.
OFBsnImageDescStatsRequestVer14(long xid, Set<OFStatsRequestFlags> flags) {
    if(flags == null) {
        throw new NullPointerException("OFBsnImageDescStatsRequestVer14: property flags cannot be null");
    }
    this.xid = xid;
    this.flags = flags;
}
// Accessors for OF message fields. Version, type, statsType, experimenter
// and subtype are fixed for this message class; only xid and flags vary.
@Override
public OFVersion getVersion() {
    return OFVersion.OF_14;
}
@Override
public OFType getType() {
    return OFType.STATS_REQUEST;
}
@Override
public long getXid() {
    return xid;
}
@Override
public OFStatsType getStatsType() {
    return OFStatsType.EXPERIMENTER;
}
@Override
public Set<OFStatsRequestFlags> getFlags() {
    return flags;
}
@Override
public long getExperimenter() {
    return 0x5c16c7L;   // Big Switch Networks experimenter ID
}
@Override
public long getSubtype() {
    return 0xeL;        // bsn_image_desc_stats_request subtype
}
// Builder seeded from this message's field values.
public OFBsnImageDescStatsRequest.Builder createBuilder() {
    return new BuilderWithParent(this);
}
/**
 * Builder seeded from an existing message: fields not explicitly set are
 * inherited from the parent message in build(). Generated code.
 */
static class BuilderWithParent implements OFBsnImageDescStatsRequest.Builder {
    final OFBsnImageDescStatsRequestVer14 parentMessage;

    // OF message fields; the *Set flags record whether the setter was called
    private boolean xidSet;
    private long xid;
    private boolean flagsSet;
    private Set<OFStatsRequestFlags> flags;

    BuilderWithParent(OFBsnImageDescStatsRequestVer14 parentMessage) {
        this.parentMessage = parentMessage;
    }
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }
    @Override
    public OFType getType() {
        return OFType.STATS_REQUEST;
    }
    @Override
    public long getXid() {
        return xid;
    }
    @Override
    public OFBsnImageDescStatsRequest.Builder setXid(long xid) {
        this.xid = xid;
        this.xidSet = true;
        return this;
    }
    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.EXPERIMENTER;
    }
    @Override
    public Set<OFStatsRequestFlags> getFlags() {
        return flags;
    }
    @Override
    public OFBsnImageDescStatsRequest.Builder setFlags(Set<OFStatsRequestFlags> flags) {
        this.flags = flags;
        this.flagsSet = true;
        return this;
    }
    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }
    @Override
    public long getSubtype() {
        return 0xeL;
    }
    // Unset fields fall back to the parent message's values.
    @Override
    public OFBsnImageDescStatsRequest build() {
        long xid = this.xidSet ? this.xid : parentMessage.xid;
        Set<OFStatsRequestFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
        if(flags == null)
            throw new NullPointerException("Property flags must not be null");
        //
        return new OFBsnImageDescStatsRequestVer14(
            xid,
            flags
        );
    }
}
/**
 * Stand-alone builder: fields not explicitly set fall back to the class
 * defaults (DEFAULT_XID / DEFAULT_FLAGS). Generated code.
 */
static class Builder implements OFBsnImageDescStatsRequest.Builder {
    // OF message fields; the *Set flags record whether the setter was called
    private boolean xidSet;
    private long xid;
    private boolean flagsSet;
    private Set<OFStatsRequestFlags> flags;

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }
    @Override
    public OFType getType() {
        return OFType.STATS_REQUEST;
    }
    @Override
    public long getXid() {
        return xid;
    }
    @Override
    public OFBsnImageDescStatsRequest.Builder setXid(long xid) {
        this.xid = xid;
        this.xidSet = true;
        return this;
    }
    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.EXPERIMENTER;
    }
    @Override
    public Set<OFStatsRequestFlags> getFlags() {
        return flags;
    }
    @Override
    public OFBsnImageDescStatsRequest.Builder setFlags(Set<OFStatsRequestFlags> flags) {
        this.flags = flags;
        this.flagsSet = true;
        return this;
    }
    @Override
    public long getExperimenter() {
        return 0x5c16c7L;
    }
    @Override
    public long getSubtype() {
        return 0xeL;
    }
    //
    // Unset fields fall back to the class defaults.
    @Override
    public OFBsnImageDescStatsRequest build() {
        long xid = this.xidSet ? this.xid : DEFAULT_XID;
        Set<OFStatsRequestFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
        if(flags == null)
            throw new NullPointerException("Property flags must not be null");
        return new OFBsnImageDescStatsRequestVer14(
            xid,
            flags
        );
    }
}
final static Reader READER = new Reader();

/**
 * Deserializer: validates every fixed-value field of the 24-byte wire
 * message and returns null (resetting the reader index) when the buffer
 * does not yet contain the full message. Generated code.
 */
static class Reader implements OFMessageReader<OFBsnImageDescStatsRequest> {
    @Override
    public OFBsnImageDescStatsRequest readFrom(ChannelBuffer bb) throws OFParseError {
        int start = bb.readerIndex();
        // fixed value property version == 5
        byte version = bb.readByte();
        if(version != (byte) 0x5)
            throw new OFParseError("Wrong version: Expected=OFVersion.OF_14(5), got="+version);
        // fixed value property type == 18
        byte type = bb.readByte();
        if(type != (byte) 0x12)
            throw new OFParseError("Wrong type: Expected=OFType.STATS_REQUEST(18), got="+type);
        int length = U16.f(bb.readShort());
        if(length != 24)
            throw new OFParseError("Wrong length: Expected=24(24), got="+length);
        if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
            // Buffer does not have all data yet: rewind and signal "try again"
            bb.readerIndex(start);
            return null;
        }
        if(logger.isTraceEnabled())
            logger.trace("readFrom - length={}", length);
        long xid = U32.f(bb.readInt());
        // fixed value property statsType == 65535
        short statsType = bb.readShort();
        if(statsType != (short) 0xffff)
            throw new OFParseError("Wrong statsType: Expected=OFStatsType.EXPERIMENTER(65535), got="+statsType);
        Set<OFStatsRequestFlags> flags = OFStatsRequestFlagsSerializerVer14.readFrom(bb);
        // pad: 4 bytes
        bb.skipBytes(4);
        // fixed value property experimenter == 0x5c16c7L
        int experimenter = bb.readInt();
        if(experimenter != 0x5c16c7)
            throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
        // fixed value property subtype == 0xeL
        int subtype = bb.readInt();
        if(subtype != 0xe)
            throw new OFParseError("Wrong subtype: Expected=0xeL(0xeL), got="+subtype);
        OFBsnImageDescStatsRequestVer14 bsnImageDescStatsRequestVer14 = new OFBsnImageDescStatsRequestVer14(
            xid,
            flags
        );
        if(logger.isTraceEnabled())
            logger.trace("readFrom - read={}", bsnImageDescStatsRequestVer14);
        return bsnImageDescStatsRequestVer14;
    }
}
// Feed this message into a Guava PrimitiveSink (used for hashing/fingerprinting).
public void putTo(PrimitiveSink sink) {
    FUNNEL.funnel(this, sink);
}

final static OFBsnImageDescStatsRequestVer14Funnel FUNNEL = new OFBsnImageDescStatsRequestVer14Funnel();

/**
 * Funnel that mirrors the wire layout (minus padding) into a sink.
 * Generated code.
 */
static class OFBsnImageDescStatsRequestVer14Funnel implements Funnel<OFBsnImageDescStatsRequestVer14> {
    private static final long serialVersionUID = 1L;
    @Override
    public void funnel(OFBsnImageDescStatsRequestVer14 message, PrimitiveSink sink) {
        // fixed value property version = 5
        sink.putByte((byte) 0x5);
        // fixed value property type = 18
        sink.putByte((byte) 0x12);
        // fixed value property length = 24
        sink.putShort((short) 0x18);
        sink.putLong(message.xid);
        // fixed value property statsType = 65535
        sink.putShort((short) 0xffff);
        OFStatsRequestFlagsSerializerVer14.putTo(message.flags, sink);
        // skip pad (4 bytes)
        // fixed value property experimenter = 0x5c16c7L
        sink.putInt(0x5c16c7);
        // fixed value property subtype = 0xeL
        sink.putInt(0xe);
    }
}
// Serialize this message onto the wire.
public void writeTo(ChannelBuffer bb) {
    WRITER.write(bb, this);
}

final static Writer WRITER = new Writer();

/**
 * Serializer: emits the fixed 24-byte layout that Reader validates.
 * Generated code.
 */
static class Writer implements OFMessageWriter<OFBsnImageDescStatsRequestVer14> {
    @Override
    public void write(ChannelBuffer bb, OFBsnImageDescStatsRequestVer14 message) {
        // fixed value property version = 5
        bb.writeByte((byte) 0x5);
        // fixed value property type = 18
        bb.writeByte((byte) 0x12);
        // fixed value property length = 24
        bb.writeShort((short) 0x18);
        bb.writeInt(U32.t(message.xid));
        // fixed value property statsType = 65535
        bb.writeShort((short) 0xffff);
        OFStatsRequestFlagsSerializerVer14.writeTo(bb, message.flags);
        // pad: 4 bytes
        bb.writeZero(4);
        // fixed value property experimenter = 0x5c16c7L
        bb.writeInt(0x5c16c7);
        // fixed value property subtype = 0xeL
        bb.writeInt(0xe);
    }
}
/** Renders as {@code OFBsnImageDescStatsRequestVer14(xid=..., flags=...)}. */
@Override
public String toString() {
    return new StringBuilder("OFBsnImageDescStatsRequestVer14(")
        .append("xid=").append(xid)
        .append(", flags=").append(flags)
        .append(")")
        .toString();
}
// Value equality over xid and flags (the only variable fields).
// The flags null branch is unreachable via the constructor (which rejects
// null) but kept by the generated template.
@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    if (obj == null)
        return false;
    if (getClass() != obj.getClass())
        return false;
    OFBsnImageDescStatsRequestVer14 other = (OFBsnImageDescStatsRequestVer14) obj;
    if( xid != other.xid)
        return false;
    if (flags == null) {
        if (other.flags != null)
            return false;
    } else if (!flags.equals(other.flags))
        return false;
    return true;
}
/**
 * Hash over xid and flags, consistent with {@link #equals(Object)}.
 * Fix: the first combining step previously computed
 * {@code result = prime * (int)(xid ^ (xid >>> 32));}, dropping the
 * {@code result} seed from the standard recipe
 * {@code result = prime * result + fieldHash} used by the sibling generated
 * classes; restored the canonical form.
 */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    // Fold the 64-bit xid into 32 bits before combining.
    result = prime * result + (int) (xid ^ (xid >>> 32));
    result = prime * result + ((flags == null) ? 0 : flags.hashCode());
    return result;
}
}
| |
/*
* Copyright (c) 2009 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.gatk.iterators;
import net.sf.picard.util.PeekableIterator;
import net.sf.samtools.Cigar;
import net.sf.samtools.CigarElement;
import net.sf.samtools.CigarOperator;
import net.sf.samtools.SAMRecord;
import org.apache.log4j.Logger;
import org.broadinstitute.sting.gatk.DownsampleType;
import org.broadinstitute.sting.gatk.DownsamplingMethod;
import org.broadinstitute.sting.gatk.ReadProperties;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.MathUtils;
import org.broadinstitute.sting.utils.ReservoirDownsampler;
import org.broadinstitute.sting.utils.exceptions.UserException;
import org.broadinstitute.sting.utils.pileup.ExtendedEventPileupElement;
import org.broadinstitute.sting.utils.pileup.PileupElement;
import org.broadinstitute.sting.utils.pileup.ReadBackedExtendedEventPileupImpl;
import org.broadinstitute.sting.utils.pileup.ReadBackedPileupImpl;
import org.broadinstitute.sting.utils.sam.GATKSAMRecord;
import org.broadinstitute.sting.utils.sam.ReadUtils;
import java.util.*;
/**
* Iterator that traverses a SAM File, accumulating information on a per-locus basis
*/
public class LocusIteratorByState extends LocusIterator {
/**
* our log, which we want to capture anything from this class
*/
private static Logger logger = Logger.getLogger(LocusIteratorByState.class);
// -----------------------------------------------------------------------------------------------------------------
//
// member fields
//
// -----------------------------------------------------------------------------------------------------------------
private boolean hasExtendedEvents = false; // will be set to true if at least one read had an indel right before the current position
/**
* Used to create new GenomeLocs.
*/
private final GenomeLocParser genomeLocParser;
private final ArrayList<String> samples;
private final ReadStateManager readStates;
static private class SAMRecordState {
SAMRecord read;                  // the read whose alignment we are walking
int readOffset = -1;             // how far are we offset from the start of the read bases?
int genomeOffset = -1;           // how far are we offset from the alignment start on the genome?
Cigar cigar = null;              // the read's cigar, cached at construction
int cigarOffset = -1;            // index of the cigar element currently being consumed
CigarElement curElement = null;  // the cigar element at cigarOffset
int nCigarElements = 0;          // total number of cigar elements, cached at construction
int cigarElementCounter = -1;    // how far are we into a single cigarElement
// The logical model for generating extended events is as follows: the "record state" implements the traversal
// along the reference; thus stepForwardOnGenome() returns on every and only on actual reference bases. This
// can be a (mis)match or a deletion (in the latter case, we still return on every individual reference base the
// deletion spans). In the extended events mode, the record state also remembers if there was an insertion, or
// if the deletion just started *right before* the current reference base the record state is pointing to upon the return from
// stepForwardOnGenome(). The next call to stepForwardOnGenome() will clear that memory (as we remember only extended
// events immediately preceding the current reference base).
boolean generateExtendedEvents = true; // should we generate an additional, special pile for indels between the ref bases?
// the only purpose of this flag is to shield away a few additional lines of code
// when extended piles are not needed, it may not be even worth it...
byte[] insertedBases = null; // remember full inserted sequence if we are generating piles of extended events (indels)
int eventLength = -1; // will be set to the length of insertion/deletion if we are generating piles of extended events
byte eventDelayedFlag = 0; // will be set to non-0 if there was an event (indel) right before the
// current base on the ref. We use a counter-like variable here since clearing the indel event is
// delayed by one base, so we need to remember how long ago we have seen the actual event
int eventStart = -1; // where on the read the extended event starts (i.e. the last position on the read prior to the
// event, or -1 if alignment starts with an insertion); this one is easy to recompute on the fly,
// we cache it here mainly for convenience
/**
 * Track per-read traversal state along the reference.
 *
 * @param read     the read to traverse; its cigar is cached here
 * @param extended whether to also record indel ("extended") events seen
 *                 immediately before each reference base
 */
public SAMRecordState(SAMRecord read, boolean extended) {
    this.read = read;
    cigar = read.getCigar();
    nCigarElements = cigar.numCigarElements();
    generateExtendedEvents = extended;
    //System.out.printf("Creating a SAMRecordState: %s%n", this);
}
public SAMRecord getRead() {
    return read;
}

/**
 * What is our current offset in the read's bases that aligns us with the reference genome?
 *
 * @return zero-based offset into the read's bases (-1 before the first step)
 */
public int getReadOffset() {
    return readOffset;
}

/**
 * What is the current offset w.r.t. the alignment state that aligns us to the readOffset?
 *
 * @return zero-based offset from the alignment start on the genome (-1 before the first step)
 */
public int getGenomeOffset() {
    return genomeOffset;
}

// 1-based reference coordinate of the current position.
public int getGenomePosition() {
    return read.getAlignmentStart() + getGenomeOffset();
}

public GenomeLoc getLocation(GenomeLocParser genomeLocParser) {
    return genomeLocParser.createGenomeLoc(read.getReferenceName(), getGenomePosition());
}

// Operator of the cigar element currently being consumed.
public CigarOperator getCurrentCigarOperator() {
    return curElement.getOperator();
}

/**
 * Returns true if we just stepped over insertion/into a deletion prior to the last return from stepForwardOnGenome.
 *
 * @return true iff an extended (indel) event is currently recorded
 */
public boolean hadIndel() {
    return (eventLength > 0);
}

public int getEventLength() {
    return eventLength;
}

// Inserted bases of the pending event, or null for a deletion event.
public byte[] getEventBases() {
    return insertedBases;
}

// Last read position before the pending event (-1 if the alignment starts with an insertion).
public int getReadEventStartOffset() {
    return eventStart;
}

public String toString() {
    return String.format("%s ro=%d go=%d co=%d cec=%d %s", read.getReadName(), readOffset, genomeOffset, cigarOffset, cigarElementCounter, curElement);
}

// The cigar element that the NEXT genome step will consume: the following
// element if the current one is exhausted and another exists, else the
// current element.
public CigarElement peekForwardOnGenome() {
    return ( cigarElementCounter + 1 > curElement.getLength() && cigarOffset + 1 < nCigarElements ? cigar.getCigarElement(cigarOffset + 1) : curElement );
}
/**
 * Advance this read's state by exactly one reference base, consuming cigar
 * elements as needed (recursing when an element is exhausted or consumes no
 * reference, e.g. I/S/H/P or zero-length elements).
 *
 * @return the cigar operator covering the new reference position, or null
 *         once the read has been stepped past its end
 * @throws UserException.MalformedBAM on reads starting/ending with deletion
 *         or with adjacent I/D events
 */
public CigarOperator stepForwardOnGenome() {
    // we enter this method with readOffset = index of the last processed base on the read
    // (-1 if we did not process a single base yet); this can be last matching base, or last base of an insertion
    if (curElement == null || ++cigarElementCounter > curElement.getLength()) {
        cigarOffset++;
        if (cigarOffset < nCigarElements) {
            curElement = cigar.getCigarElement(cigarOffset);
            cigarElementCounter = 0;
            // next line: guards against cigar elements of length 0; when new cigar element is retrieved,
            // we reenter in order to re-check cigarElementCounter against curElement's length
            return stepForwardOnGenome();
        } else {
            if (curElement != null && curElement.getOperator() == CigarOperator.D)
                throw new UserException.MalformedBAM(read, "read ends with deletion. Cigar: " + read.getCigarString());
            // Reads that contain indels model the genomeOffset as the following base in the reference. Because
            // we fall into this else block only when indels end the read, increment genomeOffset such that the
            // current offset of this read is the next ref base after the end of the indel. This position will
            // model a point on the reference somewhere after the end of the read.
            genomeOffset++; // extended events need that. Logically, it's legal to advance the genomic offset here:
            // we do step forward on the ref, and by returning null we also indicate that we are past the read end.
            if (generateExtendedEvents && eventDelayedFlag > 0) {
                // if we had an indel right before the read ended (i.e. insertion was the last cigar element),
                // we keep it until next reference base; then we discard it and this will allow the LocusIterator to
                // finally discard this read
                eventDelayedFlag--;
                if (eventDelayedFlag == 0) {
                    eventLength = -1; // reset event when we are past it
                    insertedBases = null;
                    eventStart = -1;
                }
            }
            return null;
        }
    }

    boolean done = false; // set true only by operators that consume a reference base
    switch (curElement.getOperator()) {
        case H: // ignore hard clips
        case P: // ignore pads
            cigarElementCounter = curElement.getLength();
            break;
        case I: // insertion w.r.t. the reference
            if (generateExtendedEvents) {
                // we see insertions only once, when we step right onto them; the position on the read is scrolled
                // past the insertion right after that
                if (eventDelayedFlag > 1)
                    throw new UserException.MalformedBAM(read, String.format("Adjacent I/D events in read %s -- cigar: %s", read.getReadName(), read.getCigarString()));
                insertedBases = Arrays.copyOfRange(read.getReadBases(), readOffset + 1, readOffset + 1 + curElement.getLength());
                eventLength = curElement.getLength();
                eventStart = readOffset;
                eventDelayedFlag = 2; // insertion causes re-entry into stepForwardOnGenome, so we set the delay to 2
                // System.out.println("Inserted "+(new String (insertedBases)) +" after "+readOffset);
            } // continue onto the 'S' case !  (intentional fall-through: I and S both consume read bases only)
        case S: // soft clip
            cigarElementCounter = curElement.getLength();
            readOffset += curElement.getLength();
            break;
        case D: // deletion w.r.t. the reference
            if (readOffset < 0) // we don't want reads starting with deletion, this is a malformed cigar string
                throw new UserException.MalformedBAM(read, "Read starting with deletion. Cigar: " + read.getCigarString());
            if (generateExtendedEvents) {
                if (cigarElementCounter == 1) {
                    // generate an extended event only if we just stepped into the deletion (i.e. don't
                    // generate the event at every deleted position on the ref, that's what cigarElementCounter==1 is for!)
                    if (eventDelayedFlag > 1)
                        throw new UserException.MalformedBAM(read, String.format("Adjacent I/D events in read %s -- cigar: %s", read.getReadName(), read.getCigarString()));
                    eventLength = curElement.getLength();
                    eventDelayedFlag = 2; // deletion on the ref causes an immediate return, so we have to delay by 1 only
                    eventStart = readOffset;
                    insertedBases = null;
                    // System.out.println("Deleted "+eventLength +" bases after "+readOffset);
                }
            }
            // should be the same as N case
            genomeOffset++;
            done = true;
            break;
        case N: // reference skip (looks and gets processed just like a "deletion", just different logical meaning)
            genomeOffset++;
            done = true;
            break;
        case M: // (mis)match: consumes one base on both read and reference
            readOffset++;
            genomeOffset++;
            done = true;
            break;
        default:
            throw new IllegalStateException("Case statement didn't deal with cigar op: " + curElement.getOperator());
    }

    if (generateExtendedEvents) {
        if (eventDelayedFlag > 0 && done) {
            // if we did make a successful step on the ref, decrement delayed flag. If, upon the decrementing the,
            // the flag is 1, we are standing on the reference base right after the indel (so we have to keep it).
            // Otherwise, we are away from the previous indel and have to clear our memories...
            eventDelayedFlag--; // when we notice an indel, we set delayed flag to 2, so now
            // if eventDelayedFlag == 1, an indel occured right before the current base
            if (eventDelayedFlag == 0) {
                eventLength = -1; // reset event when we are past it
                insertedBases = null;
                eventStart = -1;
            }
        }
    }

    // If no reference base was consumed (H/P/I/S), recurse to keep stepping.
    return done ? curElement.getOperator() : stepForwardOnGenome();
}
}
//final boolean DEBUG = false;
//final boolean DEBUG2 = false && DEBUG;
// configuration of this traversal (downsampling method, extended-event generation, deletion handling)
private ReadProperties readInfo;
// lazily built next element of the iteration; null means "not yet computed" (see lazyLoadNextAlignmentContext)
private AlignmentContext nextAlignmentContext;
// -----------------------------------------------------------------------------------------------------------------
//
// constructors and other basic operations
//
// -----------------------------------------------------------------------------------------------------------------
/**
 * Sets up a locus iteration over the given stream of reads.
 *
 * @param samIterator source of SAMRecords to iterate over
 * @param readInformation traversal configuration (downsampling, extended events, ...)
 * @param genomeLocParser used to build genome locations from read states
 * @param samples samples to build per-sample pileups for; may be empty only when samIterator is also empty
 * @throws IllegalArgumentException when samples is empty but reads are present
 */
public LocusIteratorByState(final Iterator<SAMRecord> samIterator, ReadProperties readInformation, GenomeLocParser genomeLocParser, Collection<String> samples) {
this.readInfo = readInformation;
this.genomeLocParser = genomeLocParser;
// NOTE(review): 'samples' must be assigned before ReadStateManager is constructed below,
// because ReadStateManager's constructor iterates over the outer 'samples' field.
this.samples = new ArrayList<String>(samples);
this.readStates = new ReadStateManager(samIterator, readInformation.getDownsamplingMethod());
// currently the GATK expects this LocusIteratorByState to accept empty sample lists, when
// there's no read data. So we need to throw this error only when samIterator.hasNext() is true
if (this.samples.isEmpty() && samIterator.hasNext()) {
throw new IllegalArgumentException("samples list must not be empty");
}
}
/**
 * For testing only: builds the single-entry sample list used when the incoming
 * SAMRecords carry no read group information. The lone entry is {@code null},
 * which is the sample name SamplePartitioner derives for group-less reads.
 *
 * @return a mutable list containing exactly one null sample name
 */
public final static Collection<String> sampleListForSAMWithoutReadGroups() {
    final List<String> dummySamples = new ArrayList<String>(1);
    dummySamples.add(null);
    return dummySamples;
}
/** This object is its own iterator; callers iterate the instance directly. */
public Iterator<AlignmentContext> iterator() {
    return this;
}
/**
 * Intentionally a no-op. The commented-out call suggests the wrapped iterator
 * was once closed here -- presumably its lifecycle is now managed by the caller
 * (TODO confirm).
 */
public void close() {
    //this.it.close();
}
/**
 * Reports whether another alignment context is available, lazily preparing it
 * as a side effect.
 *
 * @return true when a next context has been loaded
 */
public boolean hasNext() {
    lazyLoadNextAlignmentContext();
    return nextAlignmentContext != null;
}
/**
 * Genomic position of the first queued read state, or null when nothing is queued.
 */
private GenomeLoc getLocation() {
    if (readStates.isEmpty()) {
        return null;
    }
    return readStates.getFirst().getLocation(genomeLocParser);
}
// -----------------------------------------------------------------------------------------------------------------
//
// next() routine and associated collection operations
//
// -----------------------------------------------------------------------------------------------------------------
/**
 * Returns the next alignment context and clears the cached value so the
 * subsequent call advances the iteration.
 *
 * @return the next AlignmentContext
 * @throws NoSuchElementException when the underlying read stream is exhausted
 */
public AlignmentContext next() {
    // hasNext() performs the lazy load itself, so the explicit
    // lazyLoadNextAlignmentContext() call that used to precede it was
    // redundant and has been removed.
    if (!hasNext())
        throw new NoSuchElementException("LocusIteratorByState: out of elements.");
    AlignmentContext currentAlignmentContext = nextAlignmentContext;
    nextAlignmentContext = null;
    return currentAlignmentContext;
}
/**
* Creates the next alignment context from the given state. Note that this is implemented as a lazy load method.
* nextAlignmentContext MUST BE null in order for this method to advance to the next entry.
* The while loop keeps stepping forward when the pileup built at the current position turned
* out to be empty (all reads D/N-only or filtered), leaving nextAlignmentContext null for
* that position and trying the next one.
*/
private void lazyLoadNextAlignmentContext() {
while (nextAlignmentContext == null && readStates.hasNext()) {
// this call will set hasExtendedEvents to true if it picks up a read with indel right before the current position on the ref:
readStates.collectPendingReads();
int size = 0;
int nDeletions = 0;
int nInsertions = 0;
int nMQ0Reads = 0;
// if extended events are requested, and if previous traversal step brought us over an indel in
// at least one read, we emit extended pileup (making sure that it is associated with the previous base,
// i.e. the one right *before* the indel) and do NOT shift the current position on the ref.
// In this case, the subsequent call to next() will emit the normal pileup at the current base
// and shift the position.
if (readInfo.generateExtendedEvents() && hasExtendedEvents) {
Map<String, ReadBackedExtendedEventPileupImpl> fullExtendedEventPileup = new HashMap<String, ReadBackedExtendedEventPileupImpl>();
// get current location on the reference and decrement it by 1: the indels we just stepped over
// are associated with the *previous* reference base
GenomeLoc loc = genomeLocParser.incPos(getLocation(), -1);
boolean hasBeenSampled = false;
for (final String sample : samples) {
Iterator<SAMRecordState> iterator = readStates.iterator(sample);
List<ExtendedEventPileupElement> indelPile = new ArrayList<ExtendedEventPileupElement>(readStates.size(sample));
hasBeenSampled |= loc.getStart() <= readStates.getDownsamplingExtent(sample);
// per-sample counters are reset here; the outer declarations are just shared scratch space
size = 0;
nDeletions = 0;
nInsertions = 0;
nMQ0Reads = 0;
int maxDeletionLength = 0;
while (iterator.hasNext()) {
final SAMRecordState state = iterator.next();
final GATKSAMRecord read = (GATKSAMRecord) state.getRead(); // the actual read
final CigarOperator op = state.getCurrentCigarOperator(); // current cigar operator
final int readOffset = state.getReadOffset(); // the base offset on this read
final int eventStartOffset = state.getReadEventStartOffset(); // this will be -1 if base is not a deletion, or if base is the first deletion in the event. Otherwise, it will give the last base before the deletion began.
final int eventLength = state.getEventLength();
if (op == CigarOperator.N) // N's are never added to any pileup
continue;
if (state.hadIndel()) { // this read has an indel associated with the previous position on the ref
size++;
ExtendedEventPileupElement pileupElement;
if (state.getEventBases() == null) { // Deletion event
nDeletions++;
maxDeletionLength = Math.max(maxDeletionLength, state.getEventLength());
pileupElement = new ExtendedEventPileupElement(read, eventStartOffset, eventLength);
}
else { // Insertion event
nInsertions++;
pileupElement = new ExtendedEventPileupElement(read, eventStartOffset, eventLength, state.getEventBases());
}
if (read.getMappingQuality() == 0)
nMQ0Reads++;
indelPile.add(pileupElement);
}
// this read has no indel so add it to the pileup as a NOEVENT:
// a deletion that didn't start here (therefore, not an extended event)
// we add (mis)matches as no events.
else if (op != CigarOperator.D || readInfo.includeReadsWithDeletionAtLoci()) {
size++;
indelPile.add(new ExtendedEventPileupElement((GATKSAMRecord) state.getRead(), readOffset));
if (read.getMappingQuality() == 0)
nMQ0Reads++;
}
}
if (indelPile.size() != 0)
fullExtendedEventPileup.put(sample, new ReadBackedExtendedEventPileupImpl(loc, indelPile, size, maxDeletionLength, nInsertions, nDeletions, nMQ0Reads));
}
hasExtendedEvents = false; // we are done with extended events prior to current ref base
nextAlignmentContext = new AlignmentContext(loc, new ReadBackedExtendedEventPileupImpl(loc, fullExtendedEventPileup), hasBeenSampled);
}
else { // this is a regular event pileup (not extended)
GenomeLoc location = getLocation();
Map<String, ReadBackedPileupImpl> fullPileup = new HashMap<String, ReadBackedPileupImpl>();
boolean hasBeenSampled = false;
for (final String sample : samples) {
Iterator<SAMRecordState> iterator = readStates.iterator(sample);
List<PileupElement> pile = new ArrayList<PileupElement>(readStates.size(sample));
hasBeenSampled |= location.getStart() <= readStates.getDownsamplingExtent(sample);
size = 0; // number of elements in this sample's pileup
nDeletions = 0; // number of deletions in this sample's pileup
nMQ0Reads = 0; // number of MQ0 reads in this sample's pileup (warning: current implementation includes N bases that are MQ0)
while (iterator.hasNext()) {
final SAMRecordState state = iterator.next(); // state object with the read/offset information
final GATKSAMRecord read = (GATKSAMRecord) state.getRead(); // the actual read
final CigarOperator op = state.getCurrentCigarOperator(); // current cigar operator
final CigarElement nextElement = state.peekForwardOnGenome(); // next cigar element
final CigarOperator nextOp = nextElement.getOperator();
final int readOffset = state.getReadOffset(); // the base offset on this read
int nextElementLength = nextElement.getLength();
if (op == CigarOperator.N) // N's are never added to any pileup
continue;
if (op == CigarOperator.D) {
if (readInfo.includeReadsWithDeletionAtLoci()) { // only add deletions to the pileup if we are authorized to do so
pile.add(new PileupElement(read, readOffset, true, nextOp == CigarOperator.D, nextOp == CigarOperator.I, nextOp == CigarOperator.S || (state.getGenomeOffset() == 0 && read.getSoftStart() != read.getAlignmentStart()),
null,nextOp == CigarOperator.D? nextElementLength:-1));
size++;
nDeletions++;
if (read.getMappingQuality() == 0)
nMQ0Reads++;
}
}
else {
if (!filterBaseInRead(read, location.getStart())) {
String insertedBaseString = null;
if (nextOp == CigarOperator.I) {
// bases of the upcoming insertion start right after the current read offset
insertedBaseString = new String(Arrays.copyOfRange(read.getReadBases(), readOffset + 1, readOffset + 1 + nextElement.getLength()));
}
pile.add(new PileupElement(read, readOffset, false, nextOp == CigarOperator.D, nextOp == CigarOperator.I, nextOp == CigarOperator.S || (state.getGenomeOffset() == 0 && read.getSoftStart() != read.getAlignmentStart()),
insertedBaseString,nextElementLength));
size++;
if (read.getMappingQuality() == 0)
nMQ0Reads++;
}
}
}
if (pile.size() != 0) // if this pileup added at least one base, add it to the full pileup
fullPileup.put(sample, new ReadBackedPileupImpl(location, pile, size, nDeletions, nMQ0Reads));
}
updateReadStates(); // critical - must be called after we get the current state offsets and location
if (!fullPileup.isEmpty()) // if we got reads with non-D/N over the current position, we are done
nextAlignmentContext = new AlignmentContext(location, new ReadBackedPileupImpl(location, fullPileup), hasBeenSampled);
}
}
}
/**
 * Fast check: does the given read start beyond the position currently being
 * processed? The current position is taken from the first queued read state.
 */
private boolean readIsPastCurrentPosition(SAMRecord read) {
    if (readStates.isEmpty()) {
        return false;
    }
    final SAMRecordState firstState = readStates.getFirst();
    final SAMRecord firstRead = firstState.getRead();
    if (read.getReferenceIndex() > firstRead.getReferenceIndex()) {
        return true; // read is on a later contig
    }
    return read.getAlignmentStart() > firstState.getGenomePosition();
}
/**
 * Generic place to put per-base filters appropriate to LocusIteratorByState.
 * Currently the only filter applied is the adaptor-sequence check.
 *
 * @param rec the read under consideration
 * @param pos the reference position of the base being piled up
 * @return true when the base at {@code pos} lies inside the read's adaptor and
 *         should therefore be excluded from the pileup
 */
private static boolean filterBaseInRead(GATKSAMRecord rec, long pos) {
return ReadUtils.isBaseInsideAdaptor(rec, pos);
}
/**
 * Steps every queued read state forward one position on the genome, flagging
 * extended events when an indel is encountered and discarding states that have
 * stepped past the end of their read.
 */
private void updateReadStates() {
for (final String sample : samples) {
Iterator<SAMRecordState> it = readStates.iterator(sample);
while (it.hasNext()) {
SAMRecordState state = it.next();
CigarOperator op = state.stepForwardOnGenome();
if (state.hadIndel() && readInfo.generateExtendedEvents())
hasExtendedEvents = true;
else if (op == null) {
// we discard the read only when we are past its end AND indel at the end of the read (if any) was
// already processed. Keeping the read state that returned null upon stepForwardOnGenome() is safe
// as the next call to stepForwardOnGenome() will return null again AND will clear hadIndel() flag.
it.remove(); // we've stepped off the end of the object
}
}
}
}
/**
 * Removal of loci is not supported by this iterator.
 *
 * @throws UnsupportedOperationException always
 */
public void remove() {
    throw new UnsupportedOperationException("Can not remove records from a SAM file via an iterator!");
}
/**
 * Buffers the per-sample read states that form the current pileup window and applies
 * per-sample downsampling as new reads are pulled from the underlying iterator.
 * NOTE(review): this is a non-static inner class -- it reads the outer 'samples',
 * 'readStates', 'readInfo' and 'hasExtendedEvents' members directly.
 */
private class ReadStateManager {
// source of raw reads, wrapped so the next read can be inspected without consuming it
private final PeekableIterator<SAMRecord> iterator;
private final DownsamplingMethod downsamplingMethod;
// routes each incoming read to the ReadSelector for its sample
private final SamplePartitioner samplePartitioner;
private final Map<String, PerSampleReadStateManager> readStatesBySample = new HashMap<String, PerSampleReadStateManager>();
// per-sample coverage cap; Integer.MAX_VALUE when not downsampling by sample
private final int targetCoverage;
// running count of buffered read states across all samples
private int totalReadStates = 0;
public ReadStateManager(Iterator<SAMRecord> source, DownsamplingMethod downsamplingMethod) {
this.iterator = new PeekableIterator<SAMRecord>(source);
// NOTE(review): assumes downsamplingMethod itself is non-null; only its .type is guarded
this.downsamplingMethod = downsamplingMethod.type != null ? downsamplingMethod : DownsamplingMethod.NONE;
switch (this.downsamplingMethod.type) {
case BY_SAMPLE:
if (downsamplingMethod.toCoverage == null)
throw new UserException.BadArgumentValue("dcov", "Downsampling coverage (-dcov) must be specified when downsampling by sample");
this.targetCoverage = downsamplingMethod.toCoverage;
break;
default:
this.targetCoverage = Integer.MAX_VALUE;
}
Map<String, ReadSelector> readSelectors = new HashMap<String, ReadSelector>();
for (final String sample : samples) {
readStatesBySample.put(sample, new PerSampleReadStateManager());
readSelectors.put(sample, downsamplingMethod.type == DownsampleType.BY_SAMPLE ? new NRandomReadSelector(null, targetCoverage) : new AllReadsSelector());
}
samplePartitioner = new SamplePartitioner(readSelectors);
}
/**
* Returns an iterator over all the reads associated with the given sample. Note that remove() is implemented
* for this iterator; if present, total read states will be decremented.
*
* @param sample The sample.
* @return Iterator over the reads associated with that sample.
*/
public Iterator<SAMRecordState> iterator(final String sample) {
return new Iterator<SAMRecordState>() {
private Iterator<SAMRecordState> wrappedIterator = readStatesBySample.get(sample).iterator();
public boolean hasNext() {
return wrappedIterator.hasNext();
}
public SAMRecordState next() {
return wrappedIterator.next();
}
public void remove() {
wrappedIterator.remove();
totalReadStates--;
}
};
}
public boolean isEmpty() {
return totalReadStates == 0;
}
/**
* Retrieves the total number of reads in the manager across all samples.
*
* @return Total number of reads over all samples.
*/
public int size() {
return totalReadStates;
}
/**
* Retrieves the total number of reads in the manager in the given sample.
*
* @param sample The sample.
* @return Total number of reads in the given sample.
*/
public int size(final String sample) {
return readStatesBySample.get(sample).size();
}
/**
* The extent of downsampling; basically, the furthest base out which has 'fallen
* victim' to the downsampler.
*
* @param sample Sample, downsampled independently.
* @return Integer stop of the furthest undownsampled region.
*/
public int getDownsamplingExtent(final String sample) {
return readStatesBySample.get(sample).getDownsamplingExtent();
}
// first buffered read state, scanning samples in 'samples' iteration order; null when empty
public SAMRecordState getFirst() {
for (final String sample : samples) {
PerSampleReadStateManager reads = readStatesBySample.get(sample);
if (!reads.isEmpty())
return reads.peek();
}
return null;
}
public boolean hasNext() {
return totalReadStates > 0 || iterator.hasNext();
}
public void collectPendingReads() {
if (!iterator.hasNext())
return;
if (readStates.size() == 0) {
// nothing buffered yet: pull in every read sharing the first contig/alignment start
int firstContigIndex = iterator.peek().getReferenceIndex();
int firstAlignmentStart = iterator.peek().getAlignmentStart();
while (iterator.hasNext() && iterator.peek().getReferenceIndex() == firstContigIndex && iterator.peek().getAlignmentStart() == firstAlignmentStart) {
samplePartitioner.submitRead(iterator.next());
}
} else {
// Fast fail in the case that the read is past the current position.
if (readIsPastCurrentPosition(iterator.peek()))
return;
while (iterator.hasNext() && !readIsPastCurrentPosition(iterator.peek())) {
samplePartitioner.submitRead(iterator.next());
}
}
samplePartitioner.complete();
for (final String sample : samples) {
ReadSelector aggregator = samplePartitioner.getSelectedReads(sample);
Collection<SAMRecord> newReads = new ArrayList<SAMRecord>(aggregator.getSelectedReads());
PerSampleReadStateManager statesBySample = readStatesBySample.get(sample);
int numReads = statesBySample.size();
int downsamplingExtent = aggregator.getDownsamplingExtent();
if (numReads + newReads.size() <= targetCoverage || downsamplingMethod.type == DownsampleType.NONE) {
// under the cap (or not downsampling): keep everything the selector picked
long readLimit = aggregator.getNumReadsSeen();
addReadsToSample(statesBySample, newReads, readLimit);
statesBySample.specifyNewDownsamplingExtent(downsamplingExtent);
} else {
// over the cap: prune already-buffered states to make room for the new reads
int[] counts = statesBySample.getCountsPerAlignmentStart();
int[] updatedCounts = new int[counts.length];
System.arraycopy(counts, 0, updatedCounts, 0, counts.length);
boolean readPruned = true;
while (numReads + newReads.size() > targetCoverage && readPruned) {
readPruned = false;
for (int alignmentStart = updatedCounts.length - 1; numReads + newReads.size() > targetCoverage && alignmentStart >= 0; alignmentStart--) {
// NOTE(review): only alignment starts holding more than 500 buffered reads are
// pruned here -- looks like a tuning constant; confirm the intended threshold
if (updatedCounts[alignmentStart] > 500) {
updatedCounts[alignmentStart]--;
numReads--;
readPruned = true;
}
}
}
if (numReads == targetCoverage) {
// still at capacity: force one read out of the earliest alignment start
updatedCounts[0]--;
numReads--;
}
BitSet toPurge = new BitSet(readStates.size());
int readOffset = 0;
for (int i = 0; i < updatedCounts.length; i++) {
int n = counts[i];
int k = updatedCounts[i];
// randomly choose which (n - k) reads at this alignment start to discard
for (Integer purgedElement : MathUtils.sampleIndicesWithoutReplacement(n, n - k))
toPurge.set(readOffset + purgedElement);
readOffset += counts[i];
}
downsamplingExtent = Math.max(downsamplingExtent, statesBySample.purge(toPurge));
addReadsToSample(statesBySample, newReads, targetCoverage - numReads);
statesBySample.specifyNewDownsamplingExtent(downsamplingExtent);
}
}
samplePartitioner.reset();
}
/**
* Add reads with the given sample name to the given hanger entry.
*
* @param readStates The list of read states to add this collection of reads.
* @param reads Reads to add. Selected reads will be pulled from this source.
* @param maxReads Maximum number of reads to add.
*/
private void addReadsToSample(final PerSampleReadStateManager readStates, final Collection<SAMRecord> reads, final long maxReads) {
if (reads.isEmpty())
return;
Collection<SAMRecordState> newReadStates = new LinkedList<SAMRecordState>();
int readCount = 0;
for (SAMRecord read : reads) {
if (readCount < maxReads) {
SAMRecordState state = new SAMRecordState(read, readInfo.generateExtendedEvents());
state.stepForwardOnGenome();
newReadStates.add(state);
// TODO: What if we downsample the extended events away?
if (state.hadIndel())
hasExtendedEvents = true;
readCount++;
}
}
readStates.addStatesAtNextAlignmentStart(newReadStates);
}
// Per-sample FIFO of read states, grouped by alignment start via a parallel counter deque.
private class PerSampleReadStateManager implements Iterable<SAMRecordState> {
private final Queue<SAMRecordState> readStates = new LinkedList<SAMRecordState>();
// i-th counter tallies the states sharing the i-th buffered alignment start
private final Deque<Counter> readStateCounter = new LinkedList<Counter>();
private int downsamplingExtent = 0;
public void addStatesAtNextAlignmentStart(Collection<SAMRecordState> states) {
readStates.addAll(states);
readStateCounter.add(new Counter(states.size()));
totalReadStates += states.size();
}
public boolean isEmpty() {
return readStates.isEmpty();
}
public SAMRecordState peek() {
return readStates.peek();
}
public int size() {
return readStates.size();
}
public void specifyNewDownsamplingExtent(int downsamplingExtent) {
this.downsamplingExtent = Math.max(this.downsamplingExtent, downsamplingExtent);
}
public int getDownsamplingExtent() {
return downsamplingExtent;
}
public int[] getCountsPerAlignmentStart() {
int[] counts = new int[readStateCounter.size()];
int index = 0;
for (Counter counter : readStateCounter)
counts[index++] = counter.getCount();
return counts;
}
public Iterator<SAMRecordState> iterator() {
return new Iterator<SAMRecordState>() {
private Iterator<SAMRecordState> wrappedIterator = readStates.iterator();
public boolean hasNext() {
return wrappedIterator.hasNext();
}
public SAMRecordState next() {
return wrappedIterator.next();
}
public void remove() {
wrappedIterator.remove();
// keep the per-alignment-start counters in sync with the removal
Counter counter = readStateCounter.peek();
counter.decrement();
if (counter.getCount() == 0)
readStateCounter.remove();
}
};
}
/**
* Purge the given elements from the bitset. If an element in the bitset is true, purge
* the corresponding read state.
*
* @param elements bits from the set to purge.
* @return the extent of the final downsampled read.
*/
public int purge(final BitSet elements) {
int downsamplingExtent = 0;
if (elements.isEmpty() || readStates.isEmpty()) return downsamplingExtent;
Iterator<SAMRecordState> readStateIterator = readStates.iterator();
Iterator<Counter> counterIterator = readStateCounter.iterator();
Counter currentCounter = counterIterator.next();
int readIndex = 0;
long alignmentStartCounter = currentCounter.getCount();
int toPurge = elements.nextSetBit(0);
int removedCount = 0;
while (readStateIterator.hasNext() && toPurge >= 0) {
SAMRecordState state = readStateIterator.next();
downsamplingExtent = Math.max(downsamplingExtent, state.getRead().getAlignmentEnd());
if (readIndex == toPurge) {
readStateIterator.remove();
currentCounter.decrement();
if (currentCounter.getCount() == 0)
counterIterator.remove();
removedCount++;
toPurge = elements.nextSetBit(toPurge + 1);
}
readIndex++;
alignmentStartCounter--;
if (alignmentStartCounter == 0 && counterIterator.hasNext()) {
currentCounter = counterIterator.next();
alignmentStartCounter = currentCounter.getCount();
}
}
totalReadStates -= removedCount;
return downsamplingExtent;
}
}
}
/**
 * Note: assuming that, whenever we downsample, we downsample to an integer capacity.
 *
 * Minimal mutable tally used to track how many read states share one alignment start.
 */
static private class Counter {
    // current tally; only ever decremented after construction
    private int count;

    public Counter(int initialCount) {
        this.count = initialCount;
    }

    public int getCount() {
        return count;
    }

    public void decrement() {
        count -= 1;
    }
}
}
/**
 * Selects reads passed to it based on a criteria decided through inheritance.
 * Implementations in this file: AllReadsSelector, NRandomReadSelector, SamplePartitioner.
 * TODO: This is a temporary abstraction until we can get rid of this downsampling implementation and the mrl option. Get rid of this.
 */
interface ReadSelector {
/**
* All previous selectors in the chain have allowed this read. Submit it to this selector for consideration.
*
* @param read the read to evaluate.
*/
public void submitRead(SAMRecord read);
/**
* A previous selector has deemed this read unfit. Notify this selector so that this selector's counts are valid.
*
* @param read the read previously rejected.
*/
public void notifyReadRejected(SAMRecord read);
/**
* Signal the selector that read additions are complete.
*/
public void complete();
/**
* Retrieve the number of reads seen by this selector so far.
*
* @return number of reads seen.
*/
public long getNumReadsSeen();
/**
* Return the number of reads accepted by this selector so far.
*
* @return number of reads selected.
*/
public long getNumReadsSelected();
/**
* Gets the locus at which the last of the downsampled reads selected by this selector ends. The value returned will be the
* last aligned position from this selection to which a downsampled read aligns -- in other words, if a read is thrown out at
* position 3 whose cigar string is 76M, the value of this parameter will be 78.
*
* @return If any read has been downsampled, this will return the last aligned base of the longest alignment. Else, 0.
*/
public int getDownsamplingExtent();
/**
* Get the reads selected by this selector.
*
* @return collection of reads selected by this selector.
*/
public Collection<SAMRecord> getSelectedReads();
/**
* Reset this collection to its pre-gathered state.
*/
public void reset();
}
/**
 * Select every read passed in.
 * A trivial ReadSelector that keeps everything it is offered; rejected reads only
 * contribute to the bookkeeping (seen count and downsampling extent).
 */
class AllReadsSelector implements ReadSelector {
    private final Collection<SAMRecord> selected = new LinkedList<SAMRecord>();
    private long seenSoFar = 0;
    private int furthestRejectedEnd = 0;

    public void submitRead(SAMRecord read) {
        selected.add(read);
        seenSoFar++;
    }

    public void notifyReadRejected(SAMRecord read) {
        seenSoFar++;
        furthestRejectedEnd = Math.max(furthestRejectedEnd, read.getAlignmentEnd());
    }

    public void complete() {
        // nothing to finalize for this selector
    }

    public long getNumReadsSeen() {
        return seenSoFar;
    }

    public long getNumReadsSelected() {
        // NOTE(review): mirrors getNumReadsSeen(); if upstream rejections occur this
        // over-counts the selected set -- confirm this is the intended semantics
        return seenSoFar;
    }

    public int getDownsamplingExtent() {
        return furthestRejectedEnd;
    }

    public Collection<SAMRecord> getSelectedReads() {
        return selected;
    }

    public void reset() {
        selected.clear();
        seenSoFar = 0;
        furthestRejectedEnd = 0;
    }
}
/**
 * Select N reads randomly from the input stream.
 * Uses a reservoir sampler of fixed capacity; reads displaced from the reservoir are
 * reported to the (optional) chained selector.
 */
class NRandomReadSelector implements ReadSelector {
    private final ReservoirDownsampler<SAMRecord> reservoir;
    private final ReadSelector chainedSelector; // may be null (see ReadStateManager's ctor)
    private long readsSeen = 0;
    private int downsamplingExtent = 0;

    /**
     * @param chainedSelector downstream selector to forward the reservoir contents to; may be null
     * @param readLimit       capacity of the reservoir
     */
    public NRandomReadSelector(ReadSelector chainedSelector, long readLimit) {
        this.reservoir = new ReservoirDownsampler<SAMRecord>((int) readLimit);
        this.chainedSelector = chainedSelector;
    }

    public void submitRead(SAMRecord read) {
        SAMRecord displaced = reservoir.add(read);
        if (displaced != null && chainedSelector != null) {
            // NOTE(review): the read actually expelled from the reservoir is 'displaced',
            // yet 'read' is the one reported rejected -- confirm which one downstream expects.
            chainedSelector.notifyReadRejected(read);
            downsamplingExtent = Math.max(downsamplingExtent, read.getAlignmentEnd());
        }
        readsSeen++;
    }

    public void notifyReadRejected(SAMRecord read) {
        readsSeen++;
    }

    public void complete() {
        // BUG FIX: the original null check came *after* the loop already dereferenced
        // chainedSelector, throwing an NPE whenever no downstream selector was chained
        // (which is exactly how ReadStateManager constructs this class).
        if (chainedSelector != null) {
            for (SAMRecord read : reservoir.getDownsampledContents())
                chainedSelector.submitRead(read);
            chainedSelector.complete();
        }
    }

    public long getNumReadsSeen() {
        return readsSeen;
    }

    public long getNumReadsSelected() {
        return reservoir.size();
    }

    public int getDownsamplingExtent() {
        return downsamplingExtent;
    }

    public Collection<SAMRecord> getSelectedReads() {
        return reservoir.getDownsampledContents();
    }

    public void reset() {
        reservoir.clear();
        downsamplingExtent = 0;
        // CONSISTENCY FIX: zero the seen counter like AllReadsSelector.reset() does;
        // previously it accumulated across batches.
        readsSeen = 0;
        if (chainedSelector != null)
            chainedSelector.reset();
    }
}
/**
 * Note: stores reads by sample ID string, not by sample object.
 * Routes each incoming read to the per-sample ReadSelector matching its read group's
 * sample name (null for reads without a read group).
 */
class SamplePartitioner implements ReadSelector {
    private final Map<String, ReadSelector> readsBySample;
    private long readsSeen = 0;

    public SamplePartitioner(Map<String, ReadSelector> readSelectors) {
        readsBySample = readSelectors;
    }

    /** Sample name carried by the read's read group, or null when it has none. */
    private static String sampleNameOf(SAMRecord read) {
        return read.getReadGroup() != null ? read.getReadGroup().getSample() : null;
    }

    public void submitRead(SAMRecord read) {
        String sampleName = sampleNameOf(read);
        if (readsBySample.containsKey(sampleName))
            readsBySample.get(sampleName).submitRead(read);
        readsSeen++;
    }

    public void notifyReadRejected(SAMRecord read) {
        String sampleName = sampleNameOf(read);
        if (readsBySample.containsKey(sampleName))
            readsBySample.get(sampleName).notifyReadRejected(read);
        readsSeen++;
    }

    public void complete() {
        // NO-OP: per-sample selectors are finalized by their owners.
    }

    public long getNumReadsSeen() {
        return readsSeen;
    }

    public long getNumReadsSelected() {
        return readsSeen;
    }

    public int getDownsamplingExtent() {
        int downsamplingExtent = 0;
        for (ReadSelector storage : readsBySample.values())
            downsamplingExtent = Math.max(downsamplingExtent, storage.getDownsamplingExtent());
        return downsamplingExtent;
    }

    public Collection<SAMRecord> getSelectedReads() {
        throw new UnsupportedOperationException("Cannot directly get selected reads from a read partitioner.");
    }

    /**
     * The selector holding the reads routed to the given sample.
     *
     * @throws NoSuchElementException when the sample is unknown to this partitioner
     */
    public ReadSelector getSelectedReads(String sampleName) {
        if (!readsBySample.containsKey(sampleName))
            throw new NoSuchElementException("Sample name not found");
        return readsBySample.get(sampleName);
    }

    public void reset() {
        for (ReadSelector storage : readsBySample.values())
            storage.reset();
        readsSeen = 0;
    }
}
| |
package eu.fbk.utils.core;
/*
* Copyright 2011 icedrake
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.google.common.base.Charsets;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.CharBuffer;
/**
* Smaz class for compression small strings. Port to java from
* <a href="https://github.com/antirez/smaz/">antirez</a> This class is immutable.
*
* @author icedrake
*/
final class Smaz {
private static final byte UNCOMPRESSED_FLAG = 1;
/* Compression CODEBOOK, used for compression */
private static final String CODEBOOK[] = { "\002s,\266", "\003had\232\002leW", "\003on \216",
"", "\001yS", "\002ma\255\002li\227", "\003or \260", "", "\002ll\230\003s t\277",
"\004fromg\002mel", "", "\003its\332", "\001z\333", "\003ingF", "\001>\336",
"\001 \000\003 (\002nc\344", "\002nd=\003 on\312", "\002ne\213\003hat\276\003re q",
"", "\002ngT\003herz\004have\306\003s o\225", "", "\003ionk\003s a\254\002ly\352",
"\003hisL\003 inN\003 be\252", "", "\003 fo\325\003 of \003 ha\311", "", "\002of\005",
"\003 co\241\002no\267\003 ma\370", "", "", "\003 cl\356\003enta\003 an7",
"\002ns\300\001\"e", "\003n t\217\002ntP\003s, \205",
"\002pe\320\003 we\351\002om\223", "\002on\037", "", "\002y G", "\003 wa\271",
"\003 re\321\002or*", "", "\002=\"\251\002ot\337", "\003forD\002ou[", "\003 toR",
"\003 th\r", "\003 it\366", "\003but\261\002ra\202\003 wi\363\002</\361",
"\003 wh\237", "\002 4", "\003nd ?", "\002re!", "", "\003ng c", "",
"\003ly \307\003ass\323\001a\004\002rir", "", "", "", "\002se_", "\003of \"",
"\003div\364\002ros\003ere\240", "", "\002ta\310\001bZ\002si\324", "",
"\003and\u0007\002rs\335", "\002rt\362", "\002teE", "\003ati\316", "\002so\263",
"\002th\021", "\002tiJ\001c\034\003allp", "\003ate\345", "\002ss\246", "\002stM", "",
"\002><\346", "\002to\024", "\003arew", "\001d\030", "\002tr\303", "",
"\001\n1\003 a \222", "\003f tv\002veo", "\002un\340", "", "\003e o\242",
"\002a \243\002wa\326\001e\002", "\002ur\226\003e a\274", "\002us\244\003\n\r\n\247",
"\002ut\304\003e c\373", "\002we\221", "", "", "\002wh\302", "\001f,", "", "", "",
"\003d t\206", "", "", "\003th \343", "\001g;", "", "", "\001\r9\003e s\265",
"\003e t\234", "", "\003to Y", "\003e\r\n\236", "\002d \036\001h\022", "", "\001,Q",
"\002 a\031", "\002 b^", "\002\r\n\025\002 cI", "\002 d\245", "\002 e\253",
"\002 fh\001i\b\002e \013", "", "\002 hU\001-\314", "\002 i8", "", "", "\002 l\315",
"\002 m{", "\002f :\002 n\354", "\002 o\035", "\002 p}\001.n\003\r\n\r\250", "",
"\002 r\275", "\002 s>", "\002 t\016", "", "\002g \235\005which+\003whi\367",
"\002 w5", "\001/\305", "\003as \214", "\003at \207", "", "\003who\331", "",
"\001l\026\002h \212", "", "\002, $", "", "\004withV", "", "", "", "\001m-", "", "",
"\002ac\357", "\002ad\350", "\003TheH", "", "", "\004this\233\001n\t", "", "\002. y",
"", "\002alX\003e, \365", "\003tio\215\002be\\", "\002an\032\003ver\347", "",
"\004that0\003tha\313\001o\006", "\003was2", "\002arO", "\002as.",
"\002at'\003the\001\004they\200\005there\322\005theird", "\002ce\210", "\004were]", "",
"\002ch\231\002l \264\001p<", "", "", "\003one\256", "", "\003he \023\002dej",
"\003ter\270", "\002cou", "", "\002by\177\002di\201\002eax", "", "\002ec\327",
"\002edB", "\002ee\353", "", "", "\001r\f\002n )", "", "", "", "\002el\262", "",
"\003in i\002en3", "", "\002o `\001s\n", "", "\002er\033", "\003is t\002es6", "",
"\002ge\371", "\004.com\375", "\002fo\334\003our\330", "\003ch \301\001t\003",
"\002hab", "", "\003men\374", "", "\002he\020", "", "", "\001u&", "\002hif", "",
"\003not\204\002ic\203", "\003ed @\002id\355", "", "", "\002ho\273", "\002r K\001vm",
"", "", "", "\003t t\257\002il\360", "\002im\342", "\003en \317\002in\017",
"\002io\220", "\002s \027\001wA", "", "\003er |", "\003es ~\002is%", "\002it/", "",
"\002iv\272", "", "\002t #\u0007http://C\001x\372", "\002la\211", "\001<\341",
"\003, a\224" };
/* Reverse compression CODEBOOK, used for decompression */
// Index = the compressed code byte (0..253); entry = the substring it decodes to.
// Bytes 254 (single verbatim char) and 255 (verbatim run) are escape flags and
// deliberately have no entry here. Order is significant: it must stay in sync
// with the code bytes embedded in CODEBOOK above — do not reorder or edit entries.
private static final String REVERSE_CODEBOOK[] = { " ", "the", "e", "t", "a", "of", "o", "and",
        "i", "n", "s", "e ", "r", " th", " t", "in", "he", "th", "h", "he ", "to", "\r\n", "l",
        "s ", "d", " a", "an", "er", "c", " o", "d ", "on", " of", "re", "of ", "t ", ", ",
        "is", "u", "at", " ", "n ", "or", "which", "f", "m", "as", "it", "that", "\n", "was",
        "en", " ", " w", "es", " an", " i", "\r", "f ", "g", "p", "nd", " s", "nd ", "ed ",
        "w", "ed", "http://", "for", "te", "ing", "y ", "The", " c", "ti", "r ", "his", "st",
        " in", "ar", "nt", ",", " to", "y", "ng", " h", "with", "le", "al", "to ", "b", "ou",
        "be", "were", " b", "se", "o ", "ent", "ha", "ng ", "their", "\"", "hi", "from", " f",
        "in ", "de", "ion", "me", "v", ".", "ve", "all", "re ", "ri", "ro", "is ", "co", "f t",
        "are", "ea", ". ", "her", " m", "er ", " p", "es ", "by", "they", "di", "ra", "ic",
        "not", "s, ", "d t", "at ", "ce", "la", "h ", "ne", "as ", "tio", "on ", "n t", "io",
        "we", " a ", "om", ", a", "s o", "ur", "li", "ll", "ch", "had", "this", "e t", "g ",
        "e\r\n", " wh", "ere", " co", "e o", "a ", "us", " d", "ss", "\n\r\n", "\r\n\r", "=\"",
        " be", " e", "s a", "ma", "one", "t t", "or ", "but", "el", "so", "l ", "e s", "s,",
        "no", "ter", " wa", "iv", "ho", "e a", " r", "hat", "s t", "ns", "ch ", "wh", "tr",
        "ut", "/", "have", "ly ", "ta", " ha", " on", "tha", "-", " l", "ati", "en ", "pe",
        " re", "there", "ass", "si", " fo", "wa", "ec", "our", "who", "its", "z", "fo", "rs",
        ">", "ot", "un", "<", "im", "th ", "nc", "ate", "><", "ver", "ad", " we", "ly", "ee",
        " n", "id", " cl", "ac", "il", "</", "rt", " wi", "div", "e, ", " it", "whi", " ma",
        "ge", "x", "e c", "men", ".com" };
/**
 * Returns the compressed byte array for the specified string.
 *
 * <p>Input containing control or non-ASCII characters cannot be matched against
 * the ASCII codebook and is stored verbatim as UTF-8 behind a marker byte.
 * Compressed output is a stream of codebook code bytes, interleaved with
 * verbatim escapes (254 = single char, 255 = length-prefixed run) for text
 * that matched no codebook entry.
 *
 * @param strg the string to compress
 * @return compressed byte array; feed to {@link #decompress(byte[])} to restore
 */
public static byte[] compress(final String strg) {
    final ByteArrayOutputStream output = new ByteArrayOutputStream();
    if (!isOnlyAscii(strg)) {
        // Non-ASCII input: store verbatim behind the uncompressed marker byte.
        final byte[] bytes = strg.getBytes(Charsets.UTF_8);
        output.write(UNCOMPRESSED_FLAG);
        output.write(bytes, 0, bytes.length);
        return output.toByteArray();
    }
    // Buffer of pending characters that matched no codebook entry yet.
    final StringBuilder verb = new StringBuilder();
    final CharBuffer charBuffer = CharBuffer.wrap(strg);
    int inlen;
    // loop through input looking for matches in codebook
    while ((inlen = charBuffer.remaining()) > 0) {
        // Hash the next 1, 2 and 3 characters to select candidate codebook slots.
        int h1, h2, h3;
        charBuffer.mark();
        h1 = h2 = charBuffer.get() << 3;
        if (inlen > 1) {
            h2 += charBuffer.get();
        }
        if (inlen > 2) {
            h3 = h2 ^ charBuffer.get();
        } else {
            h3 = 0;
        }
        charBuffer.reset();
        // Longest match attempted is 7 characters (or whatever input remains).
        int j = 7;
        if (j > inlen) {
            j = inlen;
        }
        boolean found = false;
        /*
         * Try to lookup substrings into the codebook, starting from the longer to the shorter
         * substrings
         */
        for (; j > 0; j--) {
            CharBuffer slot;
            if (j == 1) {
                slot = CharBuffer.wrap(CODEBOOK[h1 % 241]);
            } else if (j == 2) {
                slot = CharBuffer.wrap(CODEBOOK[h2 % 241]);
            } else {
                slot = CharBuffer.wrap(CODEBOOK[h3 % 241]);
            }
            // Each slot holds records laid out as: [length char][string][code char];
            // scan every record of length j in the slot against the current prefix.
            final int slotLength = slot.length();
            int slotIndex = 0;
            int slotEndIndex = slotIndex + j + 1;
            while (slotLength > 0 && slotEndIndex <= slotLength) {
                if (slot.get(slotIndex) == j && inlen >= j
                        && slot.subSequence(slotIndex + 1, slotEndIndex).toString()
                                .equals(charBuffer.subSequence(0, j).toString())) {
                    // Match found in codebook
                    // Add verbatim data if needed
                    if (verb.length() > 0) {
                        // output the verbatim data now
                        outputVerb(output, verb.toString());
                        verb.setLength(0);
                    }
                    // Add encoded data and ditch unnecessary part of input
                    // string. The code byte stored after the record's string
                    // is the index into REVERSE_CODEBOOK used by decompress().
                    output.write(slot.get(slot.get(slotIndex) + 1 + slotIndex));
                    charBuffer.position(charBuffer.position() + j);
                    inlen -= j;
                    found = true;
                    break;
                } else {
                    slotIndex++;
                    slotEndIndex = slotIndex + j + 1;
                }
            }
        }
        // match not found, add to verbatim
        if (!found) {
            if (inlen > 0) {
                inlen--;
                verb.append(charBuffer.subSequence(0, 1).toString());
            }
            charBuffer.position(charBuffer.position() + 1);
        }
        // If the verbatim buffer is getting too long or we're at the end of
        // the doc throw the verbatim buffer to the output queue
        // (255 is the maximum run length expressible by the one-byte prefix).
        final int verbLength = verb.length();
        if (verbLength == 255 || verbLength > 0 && inlen == 0) {
            outputVerb(output, verb.toString());
            verb.setLength(0);
        }
    }
    return output.toByteArray();
}
/**
 * Decompresses a byte array produced by {@link #compress(String)} back into a String.
 *
 * @param strBytes compressed byte array (as returned by compress; may be empty)
 * @return the decompressed String
 * @see Smaz#compress(String)
 */
public static String decompress(final byte[] strBytes) {
    // compress("") produces an empty array; mirror that instead of throwing
    // ArrayIndexOutOfBoundsException on strBytes[0].
    if (strBytes.length == 0) {
        return "";
    }
    if (strBytes[0] == UNCOMPRESSED_FLAG) {
        // Verbatim (non-ASCII) payload: decode everything after the flag byte.
        // FIX: count must exclude the leading flag byte; passing strBytes.length
        // made offset+count exceed the array and threw StringIndexOutOfBoundsException.
        return new String(strBytes, 1, strBytes.length - 1, Charsets.UTF_8);
    }
    final StringBuilder out = new StringBuilder();
    for (int i = 0; i < strBytes.length; i++) {
        final char b = (char) (0xFF & strBytes[i]);
        if (b == 254) {
            // Escape: a single verbatim character follows.
            out.append((char) strBytes[++i]);
        } else if (b == 255) {
            // Escape: a verbatim run — next byte is the length, then the characters.
            final int length = 0xFF & strBytes[++i];
            for (int j = 1; j <= length; j++) {
                out.append((char) strBytes[i + j]);
            }
            i += length;
        } else {
            // Ordinary code byte: index into the reverse codebook.
            final int loc = 0xFF & b;
            out.append(REVERSE_CODEBOOK[loc]);
        }
    }
    return out.toString();
}
/**
 * Reports whether the input consists solely of printable 7-bit ASCII
 * characters (code points 32..126). Control characters (&lt;= 31, including
 * tab and newline) and anything at or above DEL (127) disqualify the string,
 * forcing {@link #compress(String)} onto its verbatim UTF-8 path.
 *
 * @param input string to inspect
 * @return true if every character is printable ASCII (vacuously true for "")
 */
private static boolean isOnlyAscii(final String input) {
    for (int i = 0, n = input.length(); i < n; i++) {
        final char ch = input.charAt(i);
        if (ch <= 31 || ch >= 127) {
            return false;
        }
    }
    return true;
}
/**
 * Writes a run of verbatim (unmatched) characters to the output stream using
 * the SMAZ escape encoding: a single character becomes {254, char}; a longer
 * run becomes {255, charLength, utf8Bytes...}.
 *
 * @param baos destination stream
 * @param str verbatim text to emit (ASCII by the time compress() calls this)
 */
private static void outputVerb(final ByteArrayOutputStream baos, final String str) {
    final int length = str.length();
    if (length == 1) {
        // One-character escape: flag byte 254 followed by the character itself.
        baos.write(254);
        baos.write(str.charAt(0));
    } else {
        // Run escape: flag byte 255, then the character count, then the bytes.
        final byte[] encoded = str.getBytes(Charsets.UTF_8);
        baos.write(255);
        baos.write(length);
        baos.write(encoded, 0, encoded.length);
    }
}
/** Private constructor: this is a static utility class and must not be instantiated. */
private Smaz() {
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.guacamole.net.auth.simple;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.UUID;
import org.apache.guacamole.GuacamoleException;
import org.apache.guacamole.form.Form;
import org.apache.guacamole.net.auth.ActiveConnection;
import org.apache.guacamole.net.auth.ActivityRecord;
import org.apache.guacamole.net.auth.ActivityRecordSet;
import org.apache.guacamole.net.auth.AuthenticationProvider;
import org.apache.guacamole.net.auth.Connection;
import org.apache.guacamole.net.auth.ConnectionGroup;
import org.apache.guacamole.net.auth.ConnectionRecord;
import org.apache.guacamole.net.auth.Directory;
import org.apache.guacamole.net.auth.SharingProfile;
import org.apache.guacamole.net.auth.User;
import org.apache.guacamole.net.auth.UserContext;
import org.apache.guacamole.protocol.GuacamoleConfiguration;
/**
 * An extremely simple UserContext implementation which provides access to
 * a defined and restricted set of GuacamoleConfigurations. Access to
 * querying or modifying either users or permissions is denied.
 */
public class SimpleUserContext implements UserContext {

    /**
     * The unique identifier of the root connection group.
     */
    private static final String ROOT_IDENTIFIER = "ROOT";

    /**
     * The AuthenticationProvider that created this UserContext.
     */
    private final AuthenticationProvider authProvider;

    /**
     * Reference to the user whose permissions dictate the configurations
     * accessible within this UserContext.
     */
    private final User self;

    /**
     * The Directory with access only to the User associated with this
     * UserContext.
     */
    private final Directory<User> userDirectory;

    /**
     * The Directory with access only to the root group associated with this
     * UserContext.
     */
    private final Directory<ConnectionGroup> connectionGroupDirectory;

    /**
     * The Directory with access to all connections within the root group
     * associated with this UserContext.
     */
    private final Directory<Connection> connectionDirectory;

    /**
     * The root connection group.
     */
    private final ConnectionGroup rootGroup;

    /**
     * Creates a new SimpleUserContext which provides access to only those
     * configurations within the given Map. The username is assigned
     * arbitrarily.
     *
     * @param authProvider
     *     The AuthenticationProvider creating this UserContext.
     *
     * @param configs
     *     A Map of all configurations for which the user associated with this
     *     UserContext has read access.
     */
    public SimpleUserContext(AuthenticationProvider authProvider,
            Map<String, GuacamoleConfiguration> configs) {
        // Delegate with a random, throwaway username.
        this(authProvider, UUID.randomUUID().toString(), configs);
    }

    /**
     * Creates a new SimpleUserContext for the user with the given username
     * which provides access to only those configurations within the given Map.
     *
     * @param authProvider
     *     The AuthenticationProvider creating this UserContext.
     *
     * @param username
     *     The username of the user associated with this UserContext.
     *
     * @param configs
     *     A Map of all configurations for which the user associated with
     *     this UserContext has read access.
     */
    public SimpleUserContext(AuthenticationProvider authProvider,
            String username, Map<String, GuacamoleConfiguration> configs) {

        Collection<String> connectionIdentifiers = new ArrayList<String>(configs.size());
        Collection<String> connectionGroupIdentifiers = Collections.singleton(ROOT_IDENTIFIER);

        // Produce collection of connections from given configs
        Collection<Connection> connections = new ArrayList<Connection>(configs.size());
        for (Map.Entry<String, GuacamoleConfiguration> configEntry : configs.entrySet()) {

            // Get connection identifier and configuration
            String identifier = configEntry.getKey();
            GuacamoleConfiguration config = configEntry.getValue();

            // Add as simple connection; the identifier doubles as the
            // human-readable connection name.
            Connection connection = new SimpleConnection(identifier, identifier, config);
            connection.setParentIdentifier(ROOT_IDENTIFIER);
            connections.add(connection);

            // Add identifier to overall set of identifiers
            connectionIdentifiers.add(identifier);

        }

        // Add root group that contains only the given configurations
        // (no child connection groups).
        this.rootGroup = new SimpleConnectionGroup(
            ROOT_IDENTIFIER, ROOT_IDENTIFIER,
            connectionIdentifiers, Collections.<String>emptyList()
        );

        // Build new user from credentials
        this.self = new SimpleUser(username, connectionIdentifiers,
                connectionGroupIdentifiers);

        // Create directories for new user
        this.userDirectory = new SimpleUserDirectory(self);
        this.connectionDirectory = new SimpleConnectionDirectory(connections);
        this.connectionGroupDirectory = new SimpleConnectionGroupDirectory(Collections.singleton(this.rootGroup));

        // Associate provided AuthenticationProvider
        this.authProvider = authProvider;

    }

    /** Returns the User whose access rights this context represents. */
    @Override
    public User self() {
        return self;
    }

    /** @return always null; this implementation exposes no additional resource. */
    @Override
    public Object getResource() throws GuacamoleException {
        return null;
    }

    /** Returns the AuthenticationProvider that created this UserContext. */
    @Override
    public AuthenticationProvider getAuthenticationProvider() {
        return authProvider;
    }

    /** Returns a Directory containing only the user associated with this context. */
    @Override
    public Directory<User> getUserDirectory()
            throws GuacamoleException {
        return userDirectory;
    }

    /** Returns a Directory of the connections built from the supplied configurations. */
    @Override
    public Directory<Connection> getConnectionDirectory()
            throws GuacamoleException {
        return connectionDirectory;
    }

    /** Returns a Directory containing only the root connection group. */
    @Override
    public Directory<ConnectionGroup> getConnectionGroupDirectory()
            throws GuacamoleException {
        return connectionGroupDirectory;
    }

    /** Returns the root connection group that contains all connections of this context. */
    @Override
    public ConnectionGroup getRootConnectionGroup() throws GuacamoleException {
        return rootGroup;
    }

    /** @return a new, empty directory — sharing profiles are not supported here. */
    @Override
    public Directory<SharingProfile> getSharingProfileDirectory()
            throws GuacamoleException {
        return new SimpleDirectory<SharingProfile>();
    }

    /** @return a new, empty directory — active connections are not tracked here. */
    @Override
    public Directory<ActiveConnection> getActiveConnectionDirectory()
            throws GuacamoleException {
        return new SimpleDirectory<ActiveConnection>();
    }

    /** @return a new, empty record set — connection history is not tracked here. */
    @Override
    public ActivityRecordSet<ConnectionRecord> getConnectionHistory()
            throws GuacamoleException {
        return new SimpleActivityRecordSet<ConnectionRecord>();
    }

    /** @return a new, empty record set — user activity history is not tracked here. */
    @Override
    public ActivityRecordSet<ActivityRecord> getUserHistory()
            throws GuacamoleException {
        return new SimpleActivityRecordSet<ActivityRecord>();
    }

    /** @return an empty collection — no custom user attributes are defined. */
    @Override
    public Collection<Form> getUserAttributes() {
        return Collections.<Form>emptyList();
    }

    /** @return an empty collection — no custom connection attributes are defined. */
    @Override
    public Collection<Form> getConnectionAttributes() {
        return Collections.<Form>emptyList();
    }

    /** @return an empty collection — no custom connection group attributes are defined. */
    @Override
    public Collection<Form> getConnectionGroupAttributes() {
        return Collections.<Form>emptyList();
    }

    /** @return an empty collection — no custom sharing profile attributes are defined. */
    @Override
    public Collection<Form> getSharingProfileAttributes() {
        return Collections.<Form>emptyList();
    }

    /** No-op: this context holds no external resources to release. */
    @Override
    public void invalidate() {
        // Nothing to invalidate
    }

}
| |
/*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.base;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import java.io.IOException;
import java.util.AbstractList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import javax.annotation.CheckReturnValue;
import javax.annotation.Nullable;
/**
* An object which joins pieces of text (specified as an array, {@link Iterable}, varargs or even a
* {@link Map}) with a separator. It either appends the results to an {@link Appendable} or returns
* them as a {@link String}. Example: <pre> {@code
*
* Joiner joiner = Joiner.on("; ").skipNulls();
* . . .
* return joiner.join("Harry", null, "Ron", "Hermione");}</pre>
*
* <p>This returns the string {@code "Harry; Ron; Hermione"}. Note that all input elements are
* converted to strings using {@link Object#toString()} before being appended.
*
* <p>If neither {@link #skipNulls()} nor {@link #useForNull(String)} is specified, the joining
* methods will throw {@link NullPointerException} if any given element is null.
*
* <p><b>Warning: joiner instances are always immutable</b>; a configuration method such as {@code
* useForNull} has no effect on the instance it is invoked on! You must store and use the new joiner
* instance returned by the method. This makes joiners thread-safe, and safe to store as {@code
* static final} constants. <pre> {@code
*
* // Bad! Do not do this!
* Joiner joiner = Joiner.on(',');
* joiner.skipNulls(); // does nothing!
* return joiner.join("wrong", null, "wrong");}</pre>
*
* <p>See the Guava User Guide article on <a href=
* "http://code.google.com/p/guava-libraries/wiki/StringsExplained#Joiner">{@code Joiner}</a>.
*
* @author Kevin Bourrillion
* @since 2.0 (imported from Google Collections Library)
*/
@GwtCompatible
public class Joiner {

  /**
   * Returns a joiner which automatically places {@code separator} between consecutive elements.
   */
  public static Joiner on(String separator) {
    return new Joiner(separator);
  }

  /**
   * Returns a joiner which automatically places {@code separator} between consecutive elements.
   */
  public static Joiner on(char separator) {
    return new Joiner(String.valueOf(separator));
  }

  // The text placed between consecutive joined parts; never null.
  private final String separator;

  /** Creates a joiner with the given non-null separator. */
  private Joiner(String separator) {
    this.separator = checkNotNull(separator);
  }

  /**
   * Copy constructor used by the {@link #useForNull} and {@link #skipNulls}
   * decorators, which subclass anonymously while reusing the separator.
   */
  private Joiner(Joiner prototype) {
    this.separator = prototype.separator;
  }

  /**
   * Appends the string representation of each of {@code parts}, using the previously configured
   * separator between each, to {@code appendable}.
   */
  public <A extends Appendable> A appendTo(A appendable, Iterable<?> parts) throws IOException {
    return appendTo(appendable, parts.iterator());
  }

  /**
   * Appends the string representation of each of {@code parts}, using the previously configured
   * separator between each, to {@code appendable}.
   *
   * @since 11.0
   */
  public <A extends Appendable> A appendTo(A appendable, Iterator<?> parts) throws IOException {
    checkNotNull(appendable);
    // Append the first part without a separator, then prefix every
    // subsequent part with the separator.
    if (parts.hasNext()) {
      appendable.append(toString(parts.next()));
      while (parts.hasNext()) {
        appendable.append(separator);
        appendable.append(toString(parts.next()));
      }
    }
    return appendable;
  }

  /**
   * Appends the string representation of each of {@code parts}, using the previously configured
   * separator between each, to {@code appendable}.
   */
  public final <A extends Appendable> A appendTo(A appendable, Object[] parts) throws IOException {
    return appendTo(appendable, Arrays.asList(parts));
  }

  /**
   * Appends to {@code appendable} the string representation of each of the remaining arguments.
   */
  public final <A extends Appendable> A appendTo(
      A appendable, @Nullable Object first, @Nullable Object second, Object... rest)
          throws IOException {
    return appendTo(appendable, iterable(first, second, rest));
  }

  /**
   * Appends the string representation of each of {@code parts}, using the previously configured
   * separator between each, to {@code builder}. Identical to {@link #appendTo(Appendable,
   * Iterable)}, except that it does not throw {@link IOException}.
   */
  public final StringBuilder appendTo(StringBuilder builder, Iterable<?> parts) {
    return appendTo(builder, parts.iterator());
  }

  /**
   * Appends the string representation of each of {@code parts}, using the previously configured
   * separator between each, to {@code builder}. Identical to {@link #appendTo(Appendable,
   * Iterable)}, except that it does not throw {@link IOException}.
   *
   * @since 11.0
   */
  public final StringBuilder appendTo(StringBuilder builder, Iterator<?> parts) {
    try {
      appendTo((Appendable) builder, parts);
    } catch (IOException impossible) {
      // StringBuilder.append never actually throws IOException.
      throw new AssertionError(impossible);
    }
    return builder;
  }

  /**
   * Appends the string representation of each of {@code parts}, using the previously configured
   * separator between each, to {@code builder}. Identical to {@link #appendTo(Appendable,
   * Iterable)}, except that it does not throw {@link IOException}.
   */
  public final StringBuilder appendTo(StringBuilder builder, Object[] parts) {
    return appendTo(builder, Arrays.asList(parts));
  }

  /**
   * Appends to {@code builder} the string representation of each of the remaining arguments.
   * Identical to {@link #appendTo(Appendable, Object, Object, Object...)}, except that it does not
   * throw {@link IOException}.
   */
  public final StringBuilder appendTo(
      StringBuilder builder, @Nullable Object first, @Nullable Object second, Object... rest) {
    return appendTo(builder, iterable(first, second, rest));
  }

  /**
   * Returns a string containing the string representation of each of {@code parts}, using the
   * previously configured separator between each.
   */
  public final String join(Iterable<?> parts) {
    return join(parts.iterator());
  }

  /**
   * Returns a string containing the string representation of each of {@code parts}, using the
   * previously configured separator between each.
   *
   * @since 11.0
   */
  public final String join(Iterator<?> parts) {
    return appendTo(new StringBuilder(), parts).toString();
  }

  /**
   * Returns a string containing the string representation of each of {@code parts}, using the
   * previously configured separator between each.
   */
  public final String join(Object[] parts) {
    return join(Arrays.asList(parts));
  }

  /**
   * Returns a string containing the string representation of each argument, using the previously
   * configured separator between each.
   */
  public final String join(@Nullable Object first, @Nullable Object second, Object... rest) {
    return join(iterable(first, second, rest));
  }

  /**
   * Returns a joiner with the same behavior as this one, except automatically substituting {@code
   * nullText} for any provided null elements.
   */
  @CheckReturnValue
  public Joiner useForNull(final String nullText) {
    checkNotNull(nullText);
    return new Joiner(this) {
      // Substitute nullText for nulls; delegate everything else to the outer joiner.
      @Override CharSequence toString(@Nullable Object part) {
        return (part == null) ? nullText : Joiner.this.toString(part);
      }
      @Override public Joiner useForNull(String nullText) {
        throw new UnsupportedOperationException("already specified useForNull");
      }
      @Override public Joiner skipNulls() {
        throw new UnsupportedOperationException("already specified useForNull");
      }
    };
  }

  /**
   * Returns a joiner with the same behavior as this joiner, except automatically skipping over any
   * provided null elements.
   */
  @CheckReturnValue
  public Joiner skipNulls() {
    return new Joiner(this) {
      @Override public <A extends Appendable> A appendTo(A appendable, Iterator<?> parts)
          throws IOException {
        checkNotNull(appendable, "appendable");
        checkNotNull(parts, "parts");
        // Find and append the first non-null part (no leading separator).
        while (parts.hasNext()) {
          Object part = parts.next();
          if (part != null) {
            appendable.append(Joiner.this.toString(part));
            break;
          }
        }
        // Append each remaining non-null part, prefixed with the separator.
        while (parts.hasNext()) {
          Object part = parts.next();
          if (part != null) {
            appendable.append(separator);
            appendable.append(Joiner.this.toString(part));
          }
        }
        return appendable;
      }
      @Override public Joiner useForNull(String nullText) {
        throw new UnsupportedOperationException("already specified skipNulls");
      }
      @Override public MapJoiner withKeyValueSeparator(String kvs) {
        throw new UnsupportedOperationException("can't use .skipNulls() with maps");
      }
    };
  }

  /**
   * Returns a {@code MapJoiner} using the given key-value separator, and the same configuration as
   * this {@code Joiner} otherwise.
   */
  @CheckReturnValue
  public MapJoiner withKeyValueSeparator(String keyValueSeparator) {
    return new MapJoiner(this, keyValueSeparator);
  }

  /**
   * An object that joins map entries in the same manner as {@code Joiner} joins iterables and
   * arrays. Like {@code Joiner}, it is thread-safe and immutable.
   *
   * <p>In addition to operating on {@code Map} instances, {@code MapJoiner} can operate on {@code
   * Multimap} entries in two distinct modes:
   *
   * <ul>
   * <li>To output a separate entry for each key-value pair, pass {@code multimap.entries()} to a
   *     {@code MapJoiner} method that accepts entries as input, and receive output of the form
   *     {@code key1=A&key1=B&key2=C}.
   * <li>To output a single entry for each key, pass {@code multimap.asMap()} to a {@code MapJoiner}
   *     method that accepts a map as input, and receive output of the form {@code
   *     key1=[A, B]&key2=C}.
   * </ul>
   *
   * @since 2.0 (imported from Google Collections Library)
   */
  public static final class MapJoiner {
    // The joiner that separates whole entries from one another.
    private final Joiner joiner;
    // The text placed between each entry's key and its value.
    private final String keyValueSeparator;

    private MapJoiner(Joiner joiner, String keyValueSeparator) {
      this.joiner = joiner; // only "this" is ever passed, so don't checkNotNull
      this.keyValueSeparator = checkNotNull(keyValueSeparator);
    }

    /**
     * Appends the string representation of each entry of {@code map}, using the previously
     * configured separator and key-value separator, to {@code appendable}.
     */
    public <A extends Appendable> A appendTo(A appendable, Map<?, ?> map) throws IOException {
      return appendTo(appendable, map.entrySet());
    }

    /**
     * Appends the string representation of each entry of {@code map}, using the previously
     * configured separator and key-value separator, to {@code builder}. Identical to {@link
     * #appendTo(Appendable, Map)}, except that it does not throw {@link IOException}.
     */
    public StringBuilder appendTo(StringBuilder builder, Map<?, ?> map) {
      return appendTo(builder, map.entrySet());
    }

    /**
     * Returns a string containing the string representation of each entry of {@code map}, using the
     * previously configured separator and key-value separator.
     */
    public String join(Map<?, ?> map) {
      return join(map.entrySet());
    }

    /**
     * Appends the string representation of each entry in {@code entries}, using the previously
     * configured separator and key-value separator, to {@code appendable}.
     *
     * @since 10.0
     */
    @Beta
    public <A extends Appendable> A appendTo(A appendable, Iterable<? extends Entry<?, ?>> entries)
        throws IOException {
      return appendTo(appendable, entries.iterator());
    }

    /**
     * Appends the string representation of each entry in {@code entries}, using the previously
     * configured separator and key-value separator, to {@code appendable}.
     *
     * @since 11.0
     */
    @Beta
    public <A extends Appendable> A appendTo(A appendable, Iterator<? extends Entry<?, ?>> parts)
        throws IOException {
      checkNotNull(appendable);
      // First entry without a leading separator, then separator-prefixed entries.
      if (parts.hasNext()) {
        Entry<?, ?> entry = parts.next();
        appendable.append(joiner.toString(entry.getKey()));
        appendable.append(keyValueSeparator);
        appendable.append(joiner.toString(entry.getValue()));
        while (parts.hasNext()) {
          appendable.append(joiner.separator);
          Entry<?, ?> e = parts.next();
          appendable.append(joiner.toString(e.getKey()));
          appendable.append(keyValueSeparator);
          appendable.append(joiner.toString(e.getValue()));
        }
      }
      return appendable;
    }

    /**
     * Appends the string representation of each entry in {@code entries}, using the previously
     * configured separator and key-value separator, to {@code builder}. Identical to {@link
     * #appendTo(Appendable, Iterable)}, except that it does not throw {@link IOException}.
     *
     * @since 10.0
     */
    @Beta
    public StringBuilder appendTo(StringBuilder builder, Iterable<? extends Entry<?, ?>> entries) {
      return appendTo(builder, entries.iterator());
    }

    /**
     * Appends the string representation of each entry in {@code entries}, using the previously
     * configured separator and key-value separator, to {@code builder}. Identical to {@link
     * #appendTo(Appendable, Iterable)}, except that it does not throw {@link IOException}.
     *
     * @since 11.0
     */
    @Beta
    public StringBuilder appendTo(StringBuilder builder, Iterator<? extends Entry<?, ?>> entries) {
      try {
        appendTo((Appendable) builder, entries);
      } catch (IOException impossible) {
        // StringBuilder.append never actually throws IOException.
        throw new AssertionError(impossible);
      }
      return builder;
    }

    /**
     * Returns a string containing the string representation of each entry in {@code entries}, using
     * the previously configured separator and key-value separator.
     *
     * @since 10.0
     */
    @Beta
    public String join(Iterable<? extends Entry<?, ?>> entries) {
      return join(entries.iterator());
    }

    /**
     * Returns a string containing the string representation of each entry in {@code entries}, using
     * the previously configured separator and key-value separator.
     *
     * @since 11.0
     */
    @Beta
    public String join(Iterator<? extends Entry<?, ?>> entries) {
      return appendTo(new StringBuilder(), entries).toString();
    }

    /**
     * Returns a map joiner with the same behavior as this one, except automatically substituting
     * {@code nullText} for any provided null keys or values.
     */
    @CheckReturnValue
    public MapJoiner useForNull(String nullText) {
      return new MapJoiner(joiner.useForNull(nullText), keyValueSeparator);
    }
  }

  /**
   * Converts a part to the CharSequence that will be appended. Overridden by
   * {@link #useForNull} to substitute text for nulls; the base implementation
   * rejects nulls.
   */
  CharSequence toString(Object part) {
    checkNotNull(part); // checkNotNull for GWT (do not optimize).
    return (part instanceof CharSequence) ? (CharSequence) part : part.toString();
  }

  /**
   * Returns a lazy, fixed-size view presenting {@code first}, {@code second} and {@code rest} as a
   * single list, without copying the varargs array.
   */
  private static Iterable<Object> iterable(
      final Object first, final Object second, final Object[] rest) {
    checkNotNull(rest);
    return new AbstractList<Object>() {
      @Override public int size() {
        return rest.length + 2;
      }
      @Override public Object get(int index) {
        switch (index) {
          case 0:
            return first;
          case 1:
            return second;
          default:
            return rest[index - 2];
        }
      }
    };
  }
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.reports.exam;
import java.io.File;
import java.io.IOException;
import java.util.Calendar;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Locale;
import java.util.TreeSet;
import java.util.Vector;
import org.apache.log4j.Logger;
import org.unitime.timetable.model.ExamPeriod;
import org.unitime.timetable.model.ExamType;
import org.unitime.timetable.model.Session;
import org.unitime.timetable.model.SubjectArea;
import org.unitime.timetable.solver.exam.ui.ExamAssignmentInfo;
import org.unitime.timetable.solver.exam.ui.ExamRoomInfo;
import org.unitime.timetable.solver.exam.ui.ExamInfo.ExamSectionInfo;
import com.lowagie.text.DocumentException;
/**
* @author Tomas Muller
*/
public class ExamPeriodChartReport extends PdfLegacyExamReport {
protected static Logger sLog = Logger.getLogger(ScheduleByCourseReport.class);
/**
 * Creates a period-assignment chart report, delegating setup to the legacy
 * PDF report base class with the fixed title "PERIOD ASSIGNMENT".
 *
 * @param mode output mode understood by {@code PdfLegacyExamReport}
 * @param file file the report is written to
 * @param session academic session being reported
 * @param examType type of examinations included in the report
 * @param subjectAreas subject areas to include (filtering semantics defined by the superclass)
 * @param exams examination assignments to report
 * @throws IOException if the output file cannot be created
 * @throws DocumentException if the PDF document cannot be produced
 */
public ExamPeriodChartReport(int mode, File file, Session session, ExamType examType, Collection<SubjectArea> subjectAreas, Collection<ExamAssignmentInfo> exams) throws IOException, DocumentException {
    super(mode, file, "PERIOD ASSIGNMENT", session, examType, subjectAreas, exams);
}
public void printReport() throws DocumentException {
if (iRC!=null && iRC.length()>0)
setFooter(iRC+(iLimit>=0?" (limit="+iLimit+")":""));
else if (iLimit>=0)
setFooter("limit="+iLimit);
Hashtable<ExamPeriod,TreeSet<ExamAssignmentInfo>> period2exams = new Hashtable();
for (ExamAssignmentInfo exam : getExams()) {
if (exam.getPeriod()==null || !hasSubjectArea(exam)) continue;
TreeSet<ExamAssignmentInfo> exams = period2exams.get(exam.getPeriod());
if (exams==null) {
exams = new TreeSet();
period2exams.put(exam.getPeriod(),exams);
}
exams.add(exam);
}
HashMap<Integer,String> times = new HashMap<Integer, String>();
HashMap<Integer,String> fixedTimes = new HashMap<Integer, String>();
HashMap<Integer,String> days = new HashMap<Integer, String>();
for (Iterator i=ExamPeriod.findAll(getSession().getUniqueId(), getExamType()).iterator();i.hasNext();) {
ExamPeriod period = (ExamPeriod)i.next();
times.put(period.getStartSlot(), period.getStartTimeLabel());
days.put(period.getDateOffset(), period.getStartDateLabel());
fixedTimes.put(period.getStartSlot(), lpad(period.getStartTimeLabel(),'0',6));
}
boolean headerPrinted = false;
Hashtable totalADay = new Hashtable();
String timesThisPage = null;
int nrCols = 0;
if (!iTotals) {
if (iCompact) {
setHeader(new String[] {
"Start Time Exam Enrl Exam Enrl Exam Enrl Exam Enrl Exam Enrl",
"---------- --------------- ---- --------------- ---- --------------- ---- --------------- ---- --------------- ----"
});
} else {
setHeader(new String[] {
"Start Time Exam Enrl Exam Enrl Exam Enrl Exam Enrl",
"---------- ------------------------ ---- ------------------------ ---- ------------------------ ---- ------------------------ ----"
// .........1.........2.........3.........4.........5.........6.........7.........8.........9........10........11........12........13...
});
}
printHeader();
}
int lastDIdx = 0;
boolean firstLine = true;
for (int dIdx = 0; dIdx < days.size(); dIdx+=nrCols) {
for (int time: new TreeSet<Integer>(times.keySet())) {
int offset = 0;
String timeStr = times.get(time);
String header1 = "";
String header2 = "";
String header3 = "";
Vector periods = new Vector();
int idx = 0;
String firstDay = null; int firstDayOffset = 0;
String lastDay = null;
nrCols = 0;
for (Iterator<Integer> f = new TreeSet<Integer>(days.keySet()).iterator(); f.hasNext(); idx++) {
int day = f.next();
String dayStr = days.get(day);
if (idx<dIdx || nrCols==(iCompact?iTotals?6:5:4)) continue;
if (firstDay!=null && (dayStr.startsWith("Mon") || day>=firstDayOffset+7)) break;
if (firstDay==null) {
firstDay = dayStr; firstDayOffset = day;
Calendar c = Calendar.getInstance(Locale.US);
c.setTime(getSession().getExamBeginDate());
c.add(Calendar.DAY_OF_YEAR, day);
if (!iTotals) {
offset = (c.get(Calendar.DAY_OF_WEEK)+5)%7;
firstDayOffset -= offset;
}
}
lastDay = dayStr;
if (iCompact) {
header1 += mpad(dayStr,20)+" ";
header2 += "Exam Enrl ";
header3 += "=============== ==== ";
} else {
header1 += mpad(dayStr,29)+" ";
header2 += "Exam Enrl ";
header3 += "======================== ==== ";
}
ExamPeriod period = null;
nrCols++;
for (Iterator i=ExamPeriod.findAll(getSession().getUniqueId(), getExamType()).iterator();i.hasNext();) {
ExamPeriod p = (ExamPeriod)i.next();
if (time!=p.getStartSlot() || day!=p.getDateOffset()) continue;
period = p; break;
}
periods.add(period);
}
if (iTotals)
setHeader(new String[] {timeStr,header1,header2,header3});
else if (offset + periods.size() > (iCompact?iTotals?6:5:4))
offset = Math.max(0, (iCompact?iTotals?6:5:4) - periods.size());
int nextLines = 0;
for (Enumeration f=periods.elements();f.hasMoreElements();) {
ExamPeriod period = (ExamPeriod)f.nextElement();
if (period==null) continue;
TreeSet<ExamAssignmentInfo> exams = period2exams.get(period);
if (exams==null) continue;
int linesThisSections = 6;
for (ExamAssignmentInfo exam : exams) {
int size = 0;
for (ExamSectionInfo section: exam.getSectionsIncludeCrosslistedDummies()) size+= section.getNrStudents();
if (iLimit<0 || size>=iLimit) {
for (ExamSectionInfo section: exam.getSectionsIncludeCrosslistedDummies())
if (hasSubjectArea(section)) linesThisSections++;
}
}
nextLines = Math.max(nextLines,linesThisSections);
}
if (iTotals) {
if (!headerPrinted) {
printHeader();
setPageName(timeStr+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
setCont(timeStr+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
timesThisPage = timeStr;
} else if (timesThisPage!=null && getLineNumber()+nextLines<=iNrLines) {
println("");
println(timeStr);
println(header1);
println(header2);
println(header3);
timesThisPage += ", "+timeStr;
setPageName(timesThisPage+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
setCont(timesThisPage+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
} else {
newPage();
timesThisPage = timeStr;
setPageName(timeStr+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
setCont(timeStr+(days.size()>nrCols?" ("+firstDay+" - "+lastDay+")":""));
}
} else {
if (nextLines==0) continue;
if (!iNewPage && !firstLine) {
if (lastDIdx!=dIdx) {
if (iCompact)
println("---------- --------------- ---- --------------- ---- --------------- ---- --------------- ---- --------------- ----");
else
println("---------- ------------------------ ---- ------------------------ ---- ------------------------ ---- ------------------------ ----");
lastDIdx = dIdx;
} else {
if (iCompact)
println(" --------------- ---- --------------- ---- --------------- ---- --------------- ---- --------------- ----");
else
println(" ------------------------ ---- ------------------------ ---- ------------------------ ---- ------------------------ ----");
}
}
firstLine = false;
setCont(firstDay+" - "+lastDay+" "+fixedTimes.get(time));
setPageName(firstDay+" - "+lastDay+" "+fixedTimes.get(time));
}
headerPrinted = true;
int max = 0;
Vector lines = new Vector();
for (Enumeration f=periods.elements();f.hasMoreElements();) {
ExamPeriod period = (ExamPeriod)f.nextElement();
if (period==null) {
Vector linesThisPeriod = new Vector();
linesThisPeriod.add(lpad("0",iCompact ? 20 : 29));
lines.add(linesThisPeriod);
continue;
}
TreeSet<ExamAssignmentInfo> exams = period2exams.get(period);
if (exams==null) exams = new TreeSet();
Vector linesThisPeriod = new Vector();
int total = 0;
int totalListed = 0;
for (ExamAssignmentInfo exam : exams) {
boolean sizePrinted = false;
int size = 0;
for (ExamSectionInfo section: exam.getSectionsIncludeCrosslistedDummies()) size+= section.getNrStudents();
for (ExamSectionInfo section : exam.getSectionsIncludeCrosslistedDummies()) {
if (!hasSubjectArea(section)) continue;
total += section.getNrStudents();
if (iLimit>=0 && size<iLimit) continue;
totalListed += section.getNrStudents();
String code = null;
if (iRoomCodes!=null && !iRoomCodes.isEmpty()) {
for (ExamRoomInfo room : section.getExamAssignment().getRooms()) {
String c = iRoomCodes.get(room.getName());
if (c!=null) code = c; break;
}
}
if (iCompact) {
linesThisPeriod.add(
rpad(section.getSubject(),7)+
rpad(section.getCourseNbr(),8)+
(sizePrinted||code==null||code.length()==0?' ':code.charAt(0))+
lpad(sizePrinted?"":String.valueOf(size),4));
} else {
if (iItype) {
if (iExternal) {
linesThisPeriod.add(
rpad(section.getSubject(),7)+
rpad(section.getCourseNbr(),8)+
rpad(section.getItype(),9)+
(sizePrinted||code==null||code.length()==0?' ':code.charAt(0))+
lpad(sizePrinted?"":String.valueOf(size),4));
} else {
linesThisPeriod.add(
rpad(section.getName(),24)+(sizePrinted||code==null||code.length()==0?' ':code.charAt(0))+
lpad(sizePrinted?"":String.valueOf(size),4));
}
} else {
linesThisPeriod.add(
rpad(section.getSubject(),7)+
rpad(section.getCourseNbr(),8)+
rpad(section.getSection(),9)+
(sizePrinted||code==null||code.length()==0?' ':code.charAt(0))+
lpad(sizePrinted?"":String.valueOf(size),4));
}
}
sizePrinted = true;
}
}
if (iCompact) {
if (iTotals) {
if (totalListed!=total)
linesThisPeriod.insertElementAt(mpad("("+totalListed+")",13)+" "+lpad(""+total,6), 0);
else
linesThisPeriod.insertElementAt(lpad(""+total,20), 0);
} else {
linesThisPeriod.insertElementAt(rpad(period.getStartDateLabel(),13)+" "+lpad(total==0?"":(""+total),6), 0);
}
} else {
if (iTotals) {
if (totalListed!=total)
linesThisPeriod.insertElementAt(mpad("("+totalListed+")",22)+" "+lpad(""+total,6), 0);
else
linesThisPeriod.insertElementAt(lpad(""+total,29), 0);
} else {
linesThisPeriod.insertElementAt(rpad(period.getStartDateLabel(),22)+" "+lpad(total==0?"":(""+total),6), 0);
}
}
max = Math.max(max, linesThisPeriod.size());
Integer td = (Integer)totalADay.get(period.getDateOffset());
totalADay.put(period.getDateOffset(),new Integer(total+(td==null?0:td.intValue())));
lines.add(linesThisPeriod);
}
for (int i=0;i<max;i++) {
String line = "";
if (!iTotals) {
if (iCompact) {
if (i==0 || iNewPage)
line += rpad(fixedTimes.get(time),12)+rpad("",offset*22);
else
line += rpad("",12)+rpad("",offset*22);
} else {
if (i==0 || iNewPage)
line += rpad(fixedTimes.get(time),11)+rpad("",offset*31);
else
line += rpad("",11)+rpad("",offset*31);
}
}
for (Enumeration f=lines.elements();f.hasMoreElements();) {
Vector linesThisPeriod = (Vector)f.nextElement();
if (i<linesThisPeriod.size())
line += (String)linesThisPeriod.elementAt(i);
else
line += rpad("",iCompact ? 20 : 29);
if (f.hasMoreElements()) line += " ";
}
println(line);
}
setCont(null);
}
if (iTotals) {
if (getLineNumber()+5>iNrLines) {
newPage();
setPageName("Totals");
} else
println("");
println("Total Student Exams");
String line1 = "", line2 = "", line3 = "";
int idx = 0;
for (Iterator<Integer> f = new TreeSet<Integer>(days.keySet()).iterator(); f.hasNext(); idx++) {
int day = f.next();
if (idx<dIdx || idx>=dIdx+nrCols) continue;
if (iCompact) {
line1 += mpad((String)days.get(day),20)+" ";
line2 += "=============== ==== ";
line3 += lpad(totalADay.get(day)==null?"":totalADay.get(day).toString(),20)+" ";
} else {
line1 += mpad((String)days.get(day),29)+" ";
line2 += "======================== ==== ";
line3 += lpad(totalADay.get(day)==null?"":totalADay.get(day).toString(),29)+" ";
}
}
println(line1);
println(line2);
println(line3);
timesThisPage = null;
}
}
lastPage();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.Closeable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import net.nicoulaj.compilecommand.annotations.Inline;
import org.apache.cassandra.concurrent.Stage;
import org.apache.cassandra.concurrent.StageManager;
import org.apache.cassandra.db.columniterator.OnDiskAtomIterator;
import org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy;
import org.apache.cassandra.db.composites.CellName;
import org.apache.cassandra.db.filter.NamesQueryFilter;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.db.marshal.CounterColumnType;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.utils.SearchIterator;
import org.apache.cassandra.utils.memory.HeapAllocator;
/**
 * Collates the data for a single row read from every relevant source
 * (memtables and sstables), merging cells and deletion info into one
 * ColumnFamily. Two strategies are available: a time-ordered collection that
 * can stop early for by-name queries, and a brute-force merge of all sources
 * for everything else.
 */
public class CollationController
{
    private final ColumnFamilyStore cfs;
    private final QueryFilter filter;
    // local deletion times before this point are eligible for purging during collation
    private final int gcBefore;

    // number of sstables actually read while collating; reported through
    // getSstablesIterated() and used by the defragmentation heuristic below
    private int sstablesIterated = 0;

    public CollationController(ColumnFamilyStore cfs, QueryFilter filter, int gcBefore)
    {
        this.cfs = cfs;
        this.filter = filter;
        this.gcBefore = gcBefore;
    }

    /**
     * Returns the collated top-level columns for the filtered row, or null if
     * no source contained any data for it.
     *
     * @param copyOnHeap whether cells obtained from memtables must be copied
     *                   onto the heap before being retained
     */
    public ColumnFamily getTopLevelColumns(boolean copyOnHeap)
    {
        // by-name queries on non-counter tables can take the cheaper
        // time-ordered path; everything else merges all sources
        return filter.filter instanceof NamesQueryFilter
               && cfs.metadata.getDefaultValidator() != CounterColumnType.instance
               ? collectTimeOrderedData(copyOnHeap)
               : collectAllData(copyOnHeap);
    }

    /**
     * Collects data in order of recency, using the sstable maxtimestamp data.
     * Once we have data for all requested columns that is newer than the newest
     * remaining maxtimestamp, we stop.
     */
    private ColumnFamily collectTimeOrderedData(boolean copyOnHeap)
    {
        final ColumnFamily container = ArrayBackedSortedColumns.factory.create(cfs.metadata, filter.filter.isReversed());
        List<OnDiskAtomIterator> iterators = new ArrayList<>();
        boolean isEmpty = true;
        Tracing.trace("Acquiring sstable references");
        ColumnFamilyStore.ViewFragment view = cfs.select(cfs.viewFilter(filter.key));

        try
        {
            Tracing.trace("Merging memtable contents");
            long mostRecentRowTombstone = Long.MIN_VALUE;
            for (Memtable memtable : view.memtables)
            {
                ColumnFamily cf = memtable.getColumnFamily(filter.key);
                if (cf != null)
                {
                    filter.delete(container.deletionInfo(), cf);
                    isEmpty = false;
                    Iterator<Cell> iter = filter.getIterator(cf);
                    while (iter.hasNext())
                    {
                        Cell cell = iter.next();
                        if (copyOnHeap)
                            cell = cell.localCopy(cfs.metadata, HeapAllocator.instance);
                        container.addColumn(cell);
                    }
                }
                // track the newest row-level tombstone seen so far; sstables
                // wholly older than it cannot contribute live data
                mostRecentRowTombstone = container.deletionInfo().getTopLevelDeletion().markedForDeleteAt;
            }

            // avoid changing the filter columns of the original filter
            // (reduceNameFilter removes columns that are known to be irrelevant)
            NamesQueryFilter namesFilter = (NamesQueryFilter) filter.filter;
            TreeSet<CellName> filterColumns = new TreeSet<>(namesFilter.columns);
            QueryFilter reducedFilter = new QueryFilter(filter.key, filter.cfName, namesFilter.withUpdatedColumns(filterColumns), filter.timestamp);

            /* add the SSTables on disk */
            Collections.sort(view.sstables, SSTableReader.maxTimestampComparator);

            // read sorted sstables, newest maxTimestamp first
            for (SSTableReader sstable : view.sstables)
            {
                // if we've already seen a row tombstone with a timestamp greater
                // than the most recent update to this sstable, we're done, since the rest of the sstables
                // will also be older
                if (sstable.getMaxTimestamp() < mostRecentRowTombstone)
                    break;

                long currentMaxTs = sstable.getMaxTimestamp();
                // drop requested columns for which we already hold data newer
                // than anything this (or any later) sstable could contain
                reduceNameFilter(reducedFilter, container, currentMaxTs);
                if (((NamesQueryFilter) reducedFilter.filter).columns.isEmpty())
                    break;

                Tracing.trace("Merging data from sstable {}", sstable.descriptor.generation);
                sstable.incrementReadCount();
                OnDiskAtomIterator iter = reducedFilter.getSSTableColumnIterator(sstable);
                iterators.add(iter);
                isEmpty = false;
                if (iter.getColumnFamily() != null)
                {
                    container.delete(iter.getColumnFamily());
                    sstablesIterated++;
                    while (iter.hasNext())
                        container.addAtom(iter.next());
                }
                mostRecentRowTombstone = container.deletionInfo().getTopLevelDeletion().markedForDeleteAt;
            }

            // we need to distinguish between "there is no data at all for this row" (BF will let us rebuild that efficiently)
            // and "there used to be data, but it's gone now" (we should cache the empty CF so we don't need to rebuild that slower)
            if (isEmpty)
                return null;

            // do a final collate.  toCollate is boilerplate required to provide a CloseableIterator
            ColumnFamily returnCF = container.cloneMeShallow();
            Tracing.trace("Collating all results");
            filter.collateOnDiskAtom(returnCF, container.iterator(), gcBefore);

            // "hoist up" the requested data into a more recent sstable
            if (sstablesIterated > cfs.getMinimumCompactionThreshold()
                && !cfs.isAutoCompactionDisabled()
                && cfs.getCompactionStrategy().shouldDefragment())
            {
                // !!WARNING!! if we stop copying our data to a heap-managed object,
                // we will need to track the lifetime of this mutation as well
                Tracing.trace("Defragmenting requested data");
                final Mutation mutation = new Mutation(cfs.keyspace.getName(), filter.key.getKey(), returnCF.cloneMe());
                StageManager.getStage(Stage.MUTATION).execute(new Runnable()
                {
                    public void run()
                    {
                        // skipping commitlog and index updates is fine since we're just de-fragmenting existing data
                        Keyspace.open(mutation.getKeyspaceName()).apply(mutation, false, false);
                    }
                });
            }

            // Caller is responsible for final removeDeletedCF.  This is important for cacheRow to work correctly:
            return returnCF;
        }
        finally
        {
            // always release the on-disk iterators we opened
            for (OnDiskAtomIterator iter : iterators)
                FileUtils.closeQuietly(iter);
        }
    }

    /**
     * remove columns from @param filter where we already have data in @param container newer than @param sstableTimestamp
     */
    private void reduceNameFilter(QueryFilter filter, ColumnFamily container, long sstableTimestamp)
    {
        if (container == null)
            return;

        SearchIterator<CellName, Cell> searchIter = container.searchIterator();
        for (Iterator<CellName> iterator = ((NamesQueryFilter) filter.filter).columns.iterator(); iterator.hasNext() && searchIter.hasNext(); )
        {
            CellName filterColumn = iterator.next();
            Cell cell = searchIter.next(filterColumn);
            // a cell we already hold that is newer than the sstable cannot be
            // superseded by it, so stop looking this column up
            if (cell != null && cell.timestamp() > sstableTimestamp)
                iterator.remove();
        }
    }

    /**
     * Collects data the brute-force way: gets an iterator for the filter in question
     * from every memtable and sstable, then merges them together.
     */
    private ColumnFamily collectAllData(boolean copyOnHeap)
    {
        Tracing.trace("Acquiring sstable references");
        ColumnFamilyStore.ViewFragment view = cfs.select(cfs.viewFilter(filter.key));
        List<Iterator<? extends OnDiskAtom>> iterators = new ArrayList<>(Iterables.size(view.memtables) + view.sstables.size());
        ColumnFamily returnCF = ArrayBackedSortedColumns.factory.create(cfs.metadata, filter.filter.isReversed());
        DeletionInfo returnDeletionInfo = returnCF.deletionInfo();

        try
        {
            Tracing.trace("Merging memtable tombstones");
            for (Memtable memtable : view.memtables)
            {
                final ColumnFamily cf = memtable.getColumnFamily(filter.key);
                if (cf != null)
                {
                    filter.delete(returnDeletionInfo, cf);
                    Iterator<Cell> iter = filter.getIterator(cf);
                    if (copyOnHeap)
                    {
                        // lazily copy each cell onto the heap as it is consumed
                        iter = Iterators.transform(iter, new Function<Cell, Cell>()
                        {
                            public Cell apply(Cell cell)
                            {
                                return cell.localCopy(cf.metadata, HeapAllocator.instance);
                            }
                        });
                    }
                    iterators.add(iter);
                }
            }

            /*
             * We can't eliminate full sstables based on the timestamp of what we've already read like
             * in collectTimeOrderedData, but we still want to eliminate sstable whose maxTimestamp < mostRecentTombstone
             * we've read. We still rely on the sstable ordering by maxTimestamp since if
             *   maxTimestamp_s1 > maxTimestamp_s0,
             * we're guaranteed that s1 cannot have a row tombstone such that
             *   timestamp(tombstone) > maxTimestamp_s0
             * since we necessarily have
             *   timestamp(tombstone) <= maxTimestamp_s1
             * In other words, iterating in maxTimestamp order allow to do our mostRecentTombstone elimination
             * in one pass, and minimize the number of sstables for which we read a rowTombstone.
             */
            Collections.sort(view.sstables, SSTableReader.maxTimestampComparator);
            List<SSTableReader> skippedSSTables = null;
            long mostRecentRowTombstone = Long.MIN_VALUE;
            long minTimestamp = Long.MAX_VALUE;
            int nonIntersectingSSTables = 0;

            for (SSTableReader sstable : view.sstables)
            {
                minTimestamp = Math.min(minTimestamp, sstable.getMinTimestamp());
                // if we've already seen a row tombstone with a timestamp greater
                // than the most recent update to this sstable, we can skip it
                if (sstable.getMaxTimestamp() < mostRecentRowTombstone)
                    break;

                if (!filter.shouldInclude(sstable))
                {
                    nonIntersectingSSTables++;
                    // sstable contains no tombstone if maxLocalDeletionTime == Integer.MAX_VALUE, so we can safely skip those entirely
                    if (sstable.getSSTableMetadata().maxLocalDeletionTime != Integer.MAX_VALUE)
                    {
                        if (skippedSSTables == null)
                            skippedSSTables = new ArrayList<>();
                        skippedSSTables.add(sstable);
                    }
                    continue;
                }

                sstable.incrementReadCount();
                OnDiskAtomIterator iter = filter.getSSTableColumnIterator(sstable);
                iterators.add(iter);
                if (iter.getColumnFamily() != null)
                {
                    ColumnFamily cf = iter.getColumnFamily();
                    if (cf.isMarkedForDelete())
                        mostRecentRowTombstone = cf.deletionInfo().getTopLevelDeletion().markedForDeleteAt;
                    returnCF.delete(cf);
                    sstablesIterated++;
                }
            }

            int includedDueToTombstones = 0;
            // Check for row tombstone in the skipped sstables
            if (skippedSSTables != null)
            {
                for (SSTableReader sstable : skippedSSTables)
                {
                    if (sstable.getMaxTimestamp() <= minTimestamp)
                        continue;

                    sstable.incrementReadCount();
                    OnDiskAtomIterator iter = filter.getSSTableColumnIterator(sstable);
                    ColumnFamily cf = iter.getColumnFamily();
                    // we are only interested in row-level tombstones here, and only if markedForDeleteAt is larger than minTimestamp
                    if (cf != null && cf.deletionInfo().getTopLevelDeletion().markedForDeleteAt > minTimestamp)
                    {
                        includedDueToTombstones++;
                        iterators.add(iter);
                        returnCF.delete(cf.deletionInfo().getTopLevelDeletion());
                        sstablesIterated++;
                    }
                    else
                    {
                        FileUtils.closeQuietly(iter);
                    }
                }
            }
            if (Tracing.isTracing())
                Tracing.trace("Skipped {}/{} non-slice-intersecting sstables, included {} due to tombstones", new Object[] {nonIntersectingSSTables, view.sstables.size(), includedDueToTombstones});

            // we need to distinguish between "there is no data at all for this row" (BF will let us rebuild that efficiently)
            // and "there used to be data, but it's gone now" (we should cache the empty CF so we don't need to rebuild that slower)
            if (iterators.isEmpty())
                return null;

            Tracing.trace("Merging data from memtables and {} sstables", sstablesIterated);
            filter.collateOnDiskAtom(returnCF, iterators, gcBefore);

            // Caller is responsible for final removeDeletedCF.  This is important for cacheRow to work correctly:
            return returnCF;
        }
        finally
        {
            // close whichever iterators hold file resources
            for (Object iter : iterators)
                if (iter instanceof Closeable)
                    FileUtils.closeQuietly((Closeable) iter);
        }
    }

    /** @return the number of sstables actually read while collating this row */
    public int getSstablesIterated()
    {
        return sstablesIterated;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.carbondata.processing.surrogatekeysgenerator.csvbased;
import java.io.BufferedWriter;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastorage.store.filesystem.CarbonFile;
import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
import org.apache.carbondata.core.datastorage.store.impl.FileFactory.FileType;
import org.apache.carbondata.core.util.CarbonUtil;
/**
 * Logs rejected ("bad") records encountered during a data load. Bad rows can be
 * appended to a human-readable log file and/or redirected to a raw CSV file so
 * they can be corrected and reloaded. Whenever a bad record is handled, the
 * task key is flagged so the overall load status can be reported as
 * "Partially" successful via {@link #hasBadRecord(String)}.
 */
public class BadRecordslogger {

  /**
   * Comment for <code>LOGGER</code>
   */
  private static final LogService LOGGER =
      LogServiceFactory.getLogService(BadRecordslogger.class.getName());

  /**
   * Which holds the key and if any bad rec found to check from API to update
   * the status. Multiple logger instances (one per load task) may insert
   * concurrently and only the per-instance write methods are synchronized, so
   * this shared map must be a ConcurrentHashMap — a plain HashMap is not safe
   * for unsynchronized multi-threaded writes.
   */
  private static final Map<String, String> badRecordEntry =
      new ConcurrentHashMap<String, String>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);

  /**
   * File Name
   */
  private final String fileName;

  /**
   * Store path
   */
  private final String storePath;

  /**
   * Writer for the human-readable bad-record log file (created lazily).
   */
  private BufferedWriter bufferedWriter;

  private DataOutputStream outStream;

  /**
   * csv file writer (created lazily)
   */
  private BufferedWriter bufferedCSVWriter;

  private DataOutputStream outCSVStream;

  /**
   * Handle to the bad-record log file; resolved on first write.
   */
  private CarbonFile logFile;

  /**
   * task key which is DatabaseName/TableName/tablename
   */
  private final String taskKey;

  public BadRecordslogger(String key, String fileName, String storePath) {
    // Initially no bad rec
    taskKey = key;
    this.fileName = fileName;
    this.storePath = storePath;
  }

  /**
   * @param key DatabaseName/TableName/tablename
   * @return return "Partially" and remove from map
   */
  public static String hasBadRecord(String key) {
    return badRecordEntry.remove(key);
  }

  /**
   * Formats one rejected row as a comma-separated line and routes it to the CSV
   * redirect file and/or the bad-record log file depending on the flags. When
   * both flags are off, nothing is written but the task is still marked as
   * partially successful.
   *
   * @param row                   rejected row; a null column truncates the line there
   * @param reason                rejection reason, appended after "----->" in the log file
   * @param valueComparer         replacement text used in place of default/member values
   * @param badRecordsLogRedirect whether to write the raw row to the CSV file
   * @param badRecordLoggerEnable whether to write row plus reason to the log file
   */
  public void addBadRecordsToBuilder(Object[] row, String reason, String valueComparer,
      boolean badRecordsLogRedirect, boolean badRecordLoggerEnable) {
    if (badRecordsLogRedirect || badRecordLoggerEnable) {
      StringBuilder logStrings = new StringBuilder();
      int size = row.length;
      int count = size;
      for (int i = 0; i < size; i++) {
        if (null == row[i]) {
          // null column: drop the trailing comma (if any) and stop emitting columns
          char ch =
              logStrings.length() > 0 ? logStrings.charAt(logStrings.length() - 1) : (char) -1;
          if (ch == ',') {
            // the comma is known to be the last character here
            logStrings.deleteCharAt(logStrings.length() - 1);
          }
          break;
        } else if (CarbonCommonConstants.MEMBER_DEFAULT_VAL.equals(row[i].toString())) {
          logStrings.append(valueComparer);
        } else {
          logStrings.append(row[i]);
        }
        // append a separator after every column except the last
        if (count > 1) {
          logStrings.append(',');
        }
        count--;
      }
      if (badRecordsLogRedirect) {
        writeBadRecordsToCSVFile(logStrings);
      }
      if (badRecordLoggerEnable) {
        logStrings.append("----->");
        if (null != reason) {
          if (reason.indexOf(CarbonCommonConstants.MEMBER_DEFAULT_VAL) > -1) {
            logStrings
                .append(reason.replace(CarbonCommonConstants.MEMBER_DEFAULT_VAL, valueComparer));
          } else {
            logStrings.append(reason);
          }
        }
        writeBadRecordsToFile(logStrings);
      }
    } else {
      // setting partial success entry since even if bad records are there then load
      // status should be partial success regardless of bad record logged
      badRecordEntry.put(taskKey, "Partially");
    }
  }

  /**
   * Lazily creates the bad-record log file and its writer, then appends one
   * formatted line. Synchronized so concurrent callers cannot interleave
   * writes or double-create the writer. Always marks the task as partially
   * successful, even when writing fails.
   */
  private synchronized void writeBadRecordsToFile(StringBuilder logStrings) {
    if (null == logFile) {
      String filePath =
          this.storePath + File.separator + this.fileName + CarbonCommonConstants.LOG_FILE_EXTENSION
              + CarbonCommonConstants.FILE_INPROGRESS_STATUS;
      logFile = FileFactory.getCarbonFile(filePath, FileFactory.getFileType(filePath));
    }
    try {
      if (null == bufferedWriter) {
        FileType fileType = FileFactory.getFileType(storePath);
        if (!FileFactory.isFileExist(this.storePath, fileType)) {
          // create the folders if not exist
          FileFactory.mkdirs(this.storePath, fileType);
          // create the files
          FileFactory.createNewFile(logFile.getPath(), fileType);
        }
        outStream = FileFactory.getDataOutputStream(logFile.getPath(), fileType);
        bufferedWriter = new BufferedWriter(new OutputStreamWriter(outStream,
            Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)));
      }
      bufferedWriter.write(logStrings.toString());
      bufferedWriter.newLine();
    } catch (FileNotFoundException e) {
      LOGGER.error("Bad Log Files not found");
    } catch (IOException e) {
      LOGGER.error("Error While writing bad log File");
    } finally {
      // if the Bad record file is created means it partially success
      // if any entry present with key that means its have bad record for
      // that key
      badRecordEntry.put(taskKey, "Partially");
    }
  }

  /**
   * Lazily creates the CSV redirect file and its writer, then appends one raw
   * bad-record line. Synchronized for the same reason as
   * writeBadRecordsToFile; also always marks the task as partially successful.
   */
  private synchronized void writeBadRecordsToCSVFile(StringBuilder logStrings) {
    String filePath =
        this.storePath + File.separator + this.fileName + CarbonCommonConstants.CSV_FILE_EXTENSION
            + CarbonCommonConstants.FILE_INPROGRESS_STATUS;
    try {
      if (null == bufferedCSVWriter) {
        FileType fileType = FileFactory.getFileType(storePath);
        if (!FileFactory.isFileExist(this.storePath, fileType)) {
          // create the folders if not exist
          FileFactory.mkdirs(this.storePath, fileType);
          // create the files
          FileFactory.createNewFile(filePath, fileType);
        }
        outCSVStream = FileFactory.getDataOutputStream(filePath, fileType);
        bufferedCSVWriter = new BufferedWriter(new OutputStreamWriter(outCSVStream,
            Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET)));
      }
      bufferedCSVWriter.write(logStrings.toString());
      bufferedCSVWriter.newLine();
    } catch (FileNotFoundException e) {
      LOGGER.error("Bad Log Files not found");
    } catch (IOException e) {
      LOGGER.error("Error While writing bad log File");
    } finally {
      badRecordEntry.put(taskKey, "Partially");
    }
  }

  /**
   * Closes both writers and their underlying streams. closeStreams void
   */
  public synchronized void closeStreams() {
    CarbonUtil.closeStreams(bufferedWriter, outStream, bufferedCSVWriter, outCSVStream);
  }
}
| |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* TrialAsyncErrorServiceSoapBindingStub.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.adwords.axis.v201809.cm;
public class TrialAsyncErrorServiceSoapBindingStub extends org.apache.axis.client.Stub implements com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorServiceInterface {
    // Parallel caches of type-mapping registrations: for index i,
    // cachedSerQNames[i] is the XML QName bound to class cachedSerClasses[i]
    // using serializer/deserializer factories cachedSerFactories[i] and
    // cachedDeserFactories[i]. Populated by the addBindings0()/addBindings1()
    // helpers. Auto-generated code; do not edit by hand.
    private java.util.Vector cachedSerClasses = new java.util.Vector();
    private java.util.Vector cachedSerQNames = new java.util.Vector();
    private java.util.Vector cachedSerFactories = new java.util.Vector();
    private java.util.Vector cachedDeserFactories = new java.util.Vector();

    // Descriptors for this service's two SOAP operations: "get" (index 0) and "query" (index 1).
    static org.apache.axis.description.OperationDesc [] _operations;

    static {
        _operations = new org.apache.axis.description.OperationDesc[2];
        _initOperationDesc1();
    }
    /**
     * Builds the Axis operation descriptors for the service's two SOAP
     * operations: "get" (stored in _operations[0]) and "query"
     * (_operations[1]). Both are wrapped/literal operations that return a
     * TrialAsyncErrorPage and declare an ApiException fault.
     * Auto-generated from the WSDL; do not edit by hand.
     */
    private static void _initOperationDesc1(){
        org.apache.axis.description.OperationDesc oper;
        org.apache.axis.description.ParameterDesc param;

        // get(Selector selector) : TrialAsyncErrorPage
        oper = new org.apache.axis.description.OperationDesc();
        oper.setName("get");
        // single optional IN parameter "selector" of schema type Selector
        param = new org.apache.axis.description.ParameterDesc(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "selector"), org.apache.axis.description.ParameterDesc.IN, new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "Selector"), com.google.api.ads.adwords.axis.v201809.cm.Selector.class, false, false);
        param.setOmittable(true);
        oper.addParameter(param);
        oper.setReturnType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "TrialAsyncErrorPage"));
        oper.setReturnClass(com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorPage.class);
        oper.setReturnQName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "rval"));
        oper.setStyle(org.apache.axis.constants.Style.WRAPPED);
        oper.setUse(org.apache.axis.constants.Use.LITERAL);
        oper.addFault(new org.apache.axis.description.FaultDesc(
                      new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ApiExceptionFault"),
                      "com.google.api.ads.adwords.axis.v201809.cm.ApiException",
                      new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ApiException"),
                      true
                     ));
        _operations[0] = oper;

        // query(String query) : TrialAsyncErrorPage
        oper = new org.apache.axis.description.OperationDesc();
        oper.setName("query");
        // single optional IN parameter "query" of XML Schema type string
        param = new org.apache.axis.description.ParameterDesc(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "query"), org.apache.axis.description.ParameterDesc.IN, new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string"), java.lang.String.class, false, false);
        param.setOmittable(true);
        oper.addParameter(param);
        oper.setReturnType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "TrialAsyncErrorPage"));
        oper.setReturnClass(com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorPage.class);
        oper.setReturnQName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "rval"));
        oper.setStyle(org.apache.axis.constants.Style.WRAPPED);
        oper.setUse(org.apache.axis.constants.Use.LITERAL);
        oper.addFault(new org.apache.axis.description.FaultDesc(
                      new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ApiExceptionFault"),
                      "com.google.api.ads.adwords.axis.v201809.cm.ApiException",
                      new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "ApiException"),
                      true
                     ));
        _operations[1] = oper;
    }
    /** Creates a stub with no preset endpoint and a default Axis service. */
    public TrialAsyncErrorServiceSoapBindingStub() throws org.apache.axis.AxisFault {
         this(null);
    }
    /** Creates a stub bound to the given endpoint URL within the given JAX-RPC service. */
    public TrialAsyncErrorServiceSoapBindingStub(java.net.URL endpointURL, javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {
         this(service);
         super.cachedEndpoint = endpointURL;
    }
public TrialAsyncErrorServiceSoapBindingStub(javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {
if (service == null) {
super.service = new org.apache.axis.client.Service();
} else {
super.service = service;
}
((org.apache.axis.client.Service)super.service).setTypeMappingVersion("1.2");
java.lang.Class cls;
javax.xml.namespace.QName qName;
javax.xml.namespace.QName qName2;
java.lang.Class beansf = org.apache.axis.encoding.ser.BeanSerializerFactory.class;
java.lang.Class beandf = org.apache.axis.encoding.ser.BeanDeserializerFactory.class;
java.lang.Class enumsf = org.apache.axis.encoding.ser.EnumSerializerFactory.class;
java.lang.Class enumdf = org.apache.axis.encoding.ser.EnumDeserializerFactory.class;
java.lang.Class arraysf = org.apache.axis.encoding.ser.ArraySerializerFactory.class;
java.lang.Class arraydf = org.apache.axis.encoding.ser.ArrayDeserializerFactory.class;
java.lang.Class simplesf = org.apache.axis.encoding.ser.SimpleSerializerFactory.class;
java.lang.Class simpledf = org.apache.axis.encoding.ser.SimpleDeserializerFactory.class;
java.lang.Class simplelistsf = org.apache.axis.encoding.ser.SimpleListSerializerFactory.class;
java.lang.Class simplelistdf = org.apache.axis.encoding.ser.SimpleListDeserializerFactory.class;
addBindings0();
addBindings1();
}
/**
 * Registers serializer/deserializer factories for the first half of the
 * v201809 schema types used by this binding (the second half is handled by
 * {@code addBindings1()}; the split keeps each method under the JVM's 64KB
 * bytecode-per-method limit). Each XML type is mapped to its generated Java
 * class and either the bean or the enum factory pair, in the same order the
 * original generated code registered them.
 */
private void addBindings0() {
    // All registered types live in the common AdWords cm/v201809 namespace.
    final java.lang.String ns = "https://adwords.google.com/api/adwords/cm/v201809";
    final java.lang.Class beansf = org.apache.axis.encoding.ser.BeanSerializerFactory.class;
    final java.lang.Class beandf = org.apache.axis.encoding.ser.BeanDeserializerFactory.class;
    final java.lang.Class enumsf = org.apache.axis.encoding.ser.EnumSerializerFactory.class;
    final java.lang.Class enumdf = org.apache.axis.encoding.ser.EnumDeserializerFactory.class;
    // {local XML type name, mapped Java class, Boolean.TRUE if enum-style (de)serialization}.
    // NOTE: "PolicyViolationError.Part" is a bean mapping, not an enum, as in the
    // original generated code.
    final java.lang.Object[][] bindings = {
        {"AdError", com.google.api.ads.adwords.axis.v201809.cm.AdError.class, Boolean.FALSE},
        {"AdError.Reason", com.google.api.ads.adwords.axis.v201809.cm.AdErrorReason.class, Boolean.TRUE},
        {"AdGroupAdError", com.google.api.ads.adwords.axis.v201809.cm.AdGroupAdError.class, Boolean.FALSE},
        {"AdGroupAdError.Reason", com.google.api.ads.adwords.axis.v201809.cm.AdGroupAdErrorReason.class, Boolean.TRUE},
        {"AdGroupCriterionError", com.google.api.ads.adwords.axis.v201809.cm.AdGroupCriterionError.class, Boolean.FALSE},
        {"AdGroupCriterionError.Reason", com.google.api.ads.adwords.axis.v201809.cm.AdGroupCriterionErrorReason.class, Boolean.TRUE},
        {"AdGroupFeedError", com.google.api.ads.adwords.axis.v201809.cm.AdGroupFeedError.class, Boolean.FALSE},
        {"AdGroupFeedError.Reason", com.google.api.ads.adwords.axis.v201809.cm.AdGroupFeedErrorReason.class, Boolean.TRUE},
        {"AdGroupServiceError", com.google.api.ads.adwords.axis.v201809.cm.AdGroupServiceError.class, Boolean.FALSE},
        {"AdGroupServiceError.Reason", com.google.api.ads.adwords.axis.v201809.cm.AdGroupServiceErrorReason.class, Boolean.TRUE},
        {"AdSharingError", com.google.api.ads.adwords.axis.v201809.cm.AdSharingError.class, Boolean.FALSE},
        {"AdSharingError.Reason", com.google.api.ads.adwords.axis.v201809.cm.AdSharingErrorReason.class, Boolean.TRUE},
        {"ApiError", com.google.api.ads.adwords.axis.v201809.cm.ApiError.class, Boolean.FALSE},
        {"ApiException", com.google.api.ads.adwords.axis.v201809.cm.ApiException.class, Boolean.FALSE},
        {"ApplicationException", com.google.api.ads.adwords.axis.v201809.cm.ApplicationException.class, Boolean.FALSE},
        {"AuthenticationError", com.google.api.ads.adwords.axis.v201809.cm.AuthenticationError.class, Boolean.FALSE},
        {"AuthenticationError.Reason", com.google.api.ads.adwords.axis.v201809.cm.AuthenticationErrorReason.class, Boolean.TRUE},
        {"AuthorizationError", com.google.api.ads.adwords.axis.v201809.cm.AuthorizationError.class, Boolean.FALSE},
        {"AuthorizationError.Reason", com.google.api.ads.adwords.axis.v201809.cm.AuthorizationErrorReason.class, Boolean.TRUE},
        {"BiddingErrors", com.google.api.ads.adwords.axis.v201809.cm.BiddingErrors.class, Boolean.FALSE},
        {"BiddingErrors.Reason", com.google.api.ads.adwords.axis.v201809.cm.BiddingErrorsReason.class, Boolean.TRUE},
        {"CampaignBidModifierError", com.google.api.ads.adwords.axis.v201809.cm.CampaignBidModifierError.class, Boolean.FALSE},
        {"CampaignBidModifierError.Reason", com.google.api.ads.adwords.axis.v201809.cm.CampaignBidModifierErrorReason.class, Boolean.TRUE},
        {"CampaignCriterionError", com.google.api.ads.adwords.axis.v201809.cm.CampaignCriterionError.class, Boolean.FALSE},
        {"CampaignCriterionError.Reason", com.google.api.ads.adwords.axis.v201809.cm.CampaignCriterionErrorReason.class, Boolean.TRUE},
        {"CampaignError", com.google.api.ads.adwords.axis.v201809.cm.CampaignError.class, Boolean.FALSE},
        {"CampaignError.Reason", com.google.api.ads.adwords.axis.v201809.cm.CampaignErrorReason.class, Boolean.TRUE},
        {"CampaignFeedError", com.google.api.ads.adwords.axis.v201809.cm.CampaignFeedError.class, Boolean.FALSE},
        {"CampaignFeedError.Reason", com.google.api.ads.adwords.axis.v201809.cm.CampaignFeedErrorReason.class, Boolean.TRUE},
        {"CampaignPreferenceError", com.google.api.ads.adwords.axis.v201809.cm.CampaignPreferenceError.class, Boolean.FALSE},
        {"CampaignPreferenceError.Reason", com.google.api.ads.adwords.axis.v201809.cm.CampaignPreferenceErrorReason.class, Boolean.TRUE},
        {"CampaignSharedSetError", com.google.api.ads.adwords.axis.v201809.cm.CampaignSharedSetError.class, Boolean.FALSE},
        {"CampaignSharedSetError.Reason", com.google.api.ads.adwords.axis.v201809.cm.CampaignSharedSetErrorReason.class, Boolean.TRUE},
        {"ClientTermsError", com.google.api.ads.adwords.axis.v201809.cm.ClientTermsError.class, Boolean.FALSE},
        {"ClientTermsError.Reason", com.google.api.ads.adwords.axis.v201809.cm.ClientTermsErrorReason.class, Boolean.TRUE},
        {"CriterionError", com.google.api.ads.adwords.axis.v201809.cm.CriterionError.class, Boolean.FALSE},
        {"CriterionError.Reason", com.google.api.ads.adwords.axis.v201809.cm.CriterionErrorReason.class, Boolean.TRUE},
        {"DatabaseError", com.google.api.ads.adwords.axis.v201809.cm.DatabaseError.class, Boolean.FALSE},
        {"DatabaseError.Reason", com.google.api.ads.adwords.axis.v201809.cm.DatabaseErrorReason.class, Boolean.TRUE},
        {"DateError", com.google.api.ads.adwords.axis.v201809.cm.DateError.class, Boolean.FALSE},
        {"DateError.Reason", com.google.api.ads.adwords.axis.v201809.cm.DateErrorReason.class, Boolean.TRUE},
        {"DateRange", com.google.api.ads.adwords.axis.v201809.cm.DateRange.class, Boolean.FALSE},
        {"DateRangeError", com.google.api.ads.adwords.axis.v201809.cm.DateRangeError.class, Boolean.FALSE},
        {"DateRangeError.Reason", com.google.api.ads.adwords.axis.v201809.cm.DateRangeErrorReason.class, Boolean.TRUE},
        {"DistinctError", com.google.api.ads.adwords.axis.v201809.cm.DistinctError.class, Boolean.FALSE},
        {"DistinctError.Reason", com.google.api.ads.adwords.axis.v201809.cm.DistinctErrorReason.class, Boolean.TRUE},
        {"EntityAccessDenied", com.google.api.ads.adwords.axis.v201809.cm.EntityAccessDenied.class, Boolean.FALSE},
        {"EntityAccessDenied.Reason", com.google.api.ads.adwords.axis.v201809.cm.EntityAccessDeniedReason.class, Boolean.TRUE},
        {"EntityCountLimitExceeded", com.google.api.ads.adwords.axis.v201809.cm.EntityCountLimitExceeded.class, Boolean.FALSE},
        {"EntityCountLimitExceeded.Reason", com.google.api.ads.adwords.axis.v201809.cm.EntityCountLimitExceededReason.class, Boolean.TRUE},
        {"EntityNotFound", com.google.api.ads.adwords.axis.v201809.cm.EntityNotFound.class, Boolean.FALSE},
        {"EntityNotFound.Reason", com.google.api.ads.adwords.axis.v201809.cm.EntityNotFoundReason.class, Boolean.TRUE},
        {"FeedError", com.google.api.ads.adwords.axis.v201809.cm.FeedError.class, Boolean.FALSE},
        {"FeedError.Reason", com.google.api.ads.adwords.axis.v201809.cm.FeedErrorReason.class, Boolean.TRUE},
        {"FieldPathElement", com.google.api.ads.adwords.axis.v201809.cm.FieldPathElement.class, Boolean.FALSE},
        {"FunctionError", com.google.api.ads.adwords.axis.v201809.cm.FunctionError.class, Boolean.FALSE},
        {"FunctionError.Reason", com.google.api.ads.adwords.axis.v201809.cm.FunctionErrorReason.class, Boolean.TRUE},
        {"IdError", com.google.api.ads.adwords.axis.v201809.cm.IdError.class, Boolean.FALSE},
        {"IdError.Reason", com.google.api.ads.adwords.axis.v201809.cm.IdErrorReason.class, Boolean.TRUE},
        {"ImageError", com.google.api.ads.adwords.axis.v201809.cm.ImageError.class, Boolean.FALSE},
        {"ImageError.Reason", com.google.api.ads.adwords.axis.v201809.cm.ImageErrorReason.class, Boolean.TRUE},
        {"InternalApiError", com.google.api.ads.adwords.axis.v201809.cm.InternalApiError.class, Boolean.FALSE},
        {"InternalApiError.Reason", com.google.api.ads.adwords.axis.v201809.cm.InternalApiErrorReason.class, Boolean.TRUE},
        {"MediaError", com.google.api.ads.adwords.axis.v201809.cm.MediaError.class, Boolean.FALSE},
        {"MediaError.Reason", com.google.api.ads.adwords.axis.v201809.cm.MediaErrorReason.class, Boolean.TRUE},
        {"MultiplierError", com.google.api.ads.adwords.axis.v201809.cm.MultiplierError.class, Boolean.FALSE},
        {"MultiplierError.Reason", com.google.api.ads.adwords.axis.v201809.cm.MultiplierErrorReason.class, Boolean.TRUE},
        {"NewEntityCreationError", com.google.api.ads.adwords.axis.v201809.cm.NewEntityCreationError.class, Boolean.FALSE},
        {"NewEntityCreationError.Reason", com.google.api.ads.adwords.axis.v201809.cm.NewEntityCreationErrorReason.class, Boolean.TRUE},
        {"NotEmptyError", com.google.api.ads.adwords.axis.v201809.cm.NotEmptyError.class, Boolean.FALSE},
        {"NotEmptyError.Reason", com.google.api.ads.adwords.axis.v201809.cm.NotEmptyErrorReason.class, Boolean.TRUE},
        {"NullError", com.google.api.ads.adwords.axis.v201809.cm.NullError.class, Boolean.FALSE},
        {"NullError.Reason", com.google.api.ads.adwords.axis.v201809.cm.NullErrorReason.class, Boolean.TRUE},
        {"OperationAccessDenied", com.google.api.ads.adwords.axis.v201809.cm.OperationAccessDenied.class, Boolean.FALSE},
        {"OperationAccessDenied.Reason", com.google.api.ads.adwords.axis.v201809.cm.OperationAccessDeniedReason.class, Boolean.TRUE},
        {"OperatorError", com.google.api.ads.adwords.axis.v201809.cm.OperatorError.class, Boolean.FALSE},
        {"OperatorError.Reason", com.google.api.ads.adwords.axis.v201809.cm.OperatorErrorReason.class, Boolean.TRUE},
        {"OrderBy", com.google.api.ads.adwords.axis.v201809.cm.OrderBy.class, Boolean.FALSE},
        {"Page", com.google.api.ads.adwords.axis.v201809.cm.Page.class, Boolean.FALSE},
        {"Paging", com.google.api.ads.adwords.axis.v201809.cm.Paging.class, Boolean.FALSE},
        {"PagingError", com.google.api.ads.adwords.axis.v201809.cm.PagingError.class, Boolean.FALSE},
        {"PagingError.Reason", com.google.api.ads.adwords.axis.v201809.cm.PagingErrorReason.class, Boolean.TRUE},
        {"PolicyViolationError", com.google.api.ads.adwords.axis.v201809.cm.PolicyViolationError.class, Boolean.FALSE},
        {"PolicyViolationError.Part", com.google.api.ads.adwords.axis.v201809.cm.PolicyViolationErrorPart.class, Boolean.FALSE},
        {"PolicyViolationKey", com.google.api.ads.adwords.axis.v201809.cm.PolicyViolationKey.class, Boolean.FALSE},
        {"Predicate", com.google.api.ads.adwords.axis.v201809.cm.Predicate.class, Boolean.FALSE},
        {"Predicate.Operator", com.google.api.ads.adwords.axis.v201809.cm.PredicateOperator.class, Boolean.TRUE},
        {"QueryError", com.google.api.ads.adwords.axis.v201809.cm.QueryError.class, Boolean.FALSE},
        {"QueryError.Reason", com.google.api.ads.adwords.axis.v201809.cm.QueryErrorReason.class, Boolean.TRUE},
        {"QuotaCheckError", com.google.api.ads.adwords.axis.v201809.cm.QuotaCheckError.class, Boolean.FALSE},
        {"QuotaCheckError.Reason", com.google.api.ads.adwords.axis.v201809.cm.QuotaCheckErrorReason.class, Boolean.TRUE},
        {"RangeError", com.google.api.ads.adwords.axis.v201809.cm.RangeError.class, Boolean.FALSE},
        {"RangeError.Reason", com.google.api.ads.adwords.axis.v201809.cm.RangeErrorReason.class, Boolean.TRUE},
        {"RateExceededError", com.google.api.ads.adwords.axis.v201809.cm.RateExceededError.class, Boolean.FALSE},
        {"RateExceededError.Reason", com.google.api.ads.adwords.axis.v201809.cm.RateExceededErrorReason.class, Boolean.TRUE},
        {"ReadOnlyError", com.google.api.ads.adwords.axis.v201809.cm.ReadOnlyError.class, Boolean.FALSE},
        {"ReadOnlyError.Reason", com.google.api.ads.adwords.axis.v201809.cm.ReadOnlyErrorReason.class, Boolean.TRUE},
        {"RejectedError", com.google.api.ads.adwords.axis.v201809.cm.RejectedError.class, Boolean.FALSE},
        {"RejectedError.Reason", com.google.api.ads.adwords.axis.v201809.cm.RejectedErrorReason.class, Boolean.TRUE},
        {"RequestError", com.google.api.ads.adwords.axis.v201809.cm.RequestError.class, Boolean.FALSE}
    };
    // Register each mapping in table order, exactly as the unrolled code did:
    // QName, class, then the serializer/deserializer factory pair.
    for (int i = 0; i < bindings.length; i++) {
        java.lang.Object[] b = bindings[i];
        boolean isEnum = ((java.lang.Boolean) b[2]).booleanValue();
        cachedSerQNames.add(new javax.xml.namespace.QName(ns, (java.lang.String) b[0]));
        cachedSerClasses.add((java.lang.Class) b[1]);
        cachedSerFactories.add(isEnum ? enumsf : beansf);
        cachedDeserFactories.add(isEnum ? enumdf : beandf);
    }
}
/**
 * Registers the second batch of XML schema type bindings (QName, Java
 * class, serializer/deserializer factories) in the cached lists that
 * {@link #createCall()} replays on the first call.  Behavior and
 * registration order are identical to the generated original; the
 * repeated five-line stanzas are factored into {@link #addBinding1}.
 */
private void addBindings1() {
    // Factory classes for the two kinds of schema types registered in this
    // batch: complex (bean) types and enumerated (simple restriction) types.
    // The generated original also declared array/simple/simple-list factory
    // locals and a scratch QName variable (qName2), none of which were used
    // in this method, so they are omitted here.
    java.lang.Class beansf = org.apache.axis.encoding.ser.BeanSerializerFactory.class;
    java.lang.Class beandf = org.apache.axis.encoding.ser.BeanDeserializerFactory.class;
    java.lang.Class enumsf = org.apache.axis.encoding.ser.EnumSerializerFactory.class;
    java.lang.Class enumdf = org.apache.axis.encoding.ser.EnumDeserializerFactory.class;
    // Registration order is preserved exactly from the generated original.
    addBinding1("RequestError.Reason", com.google.api.ads.adwords.axis.v201809.cm.RequestErrorReason.class, enumsf, enumdf);
    addBinding1("RequiredError", com.google.api.ads.adwords.axis.v201809.cm.RequiredError.class, beansf, beandf);
    addBinding1("RequiredError.Reason", com.google.api.ads.adwords.axis.v201809.cm.RequiredErrorReason.class, enumsf, enumdf);
    addBinding1("Selector", com.google.api.ads.adwords.axis.v201809.cm.Selector.class, beansf, beandf);
    addBinding1("SelectorError", com.google.api.ads.adwords.axis.v201809.cm.SelectorError.class, beansf, beandf);
    addBinding1("SelectorError.Reason", com.google.api.ads.adwords.axis.v201809.cm.SelectorErrorReason.class, enumsf, enumdf);
    addBinding1("SettingError", com.google.api.ads.adwords.axis.v201809.cm.SettingError.class, beansf, beandf);
    addBinding1("SettingError.Reason", com.google.api.ads.adwords.axis.v201809.cm.SettingErrorReason.class, enumsf, enumdf);
    addBinding1("SizeLimitError", com.google.api.ads.adwords.axis.v201809.cm.SizeLimitError.class, beansf, beandf);
    addBinding1("SizeLimitError.Reason", com.google.api.ads.adwords.axis.v201809.cm.SizeLimitErrorReason.class, enumsf, enumdf);
    addBinding1("SoapHeader", com.google.api.ads.adwords.axis.v201809.cm.SoapHeader.class, beansf, beandf);
    addBinding1("SoapResponseHeader", com.google.api.ads.adwords.axis.v201809.cm.SoapResponseHeader.class, beansf, beandf);
    addBinding1("SortOrder", com.google.api.ads.adwords.axis.v201809.cm.SortOrder.class, enumsf, enumdf);
    addBinding1("StringFormatError", com.google.api.ads.adwords.axis.v201809.cm.StringFormatError.class, beansf, beandf);
    addBinding1("StringFormatError.Reason", com.google.api.ads.adwords.axis.v201809.cm.StringFormatErrorReason.class, enumsf, enumdf);
    addBinding1("StringLengthError", com.google.api.ads.adwords.axis.v201809.cm.StringLengthError.class, beansf, beandf);
    addBinding1("StringLengthError.Reason", com.google.api.ads.adwords.axis.v201809.cm.StringLengthErrorReason.class, enumsf, enumdf);
    addBinding1("TrialAsyncError", com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncError.class, beansf, beandf);
    addBinding1("TrialAsyncErrorPage", com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorPage.class, beansf, beandf);
    addBinding1("UrlError", com.google.api.ads.adwords.axis.v201809.cm.UrlError.class, beansf, beandf);
    addBinding1("UrlError.Reason", com.google.api.ads.adwords.axis.v201809.cm.UrlErrorReason.class, enumsf, enumdf);
    addBinding1("VideoError", com.google.api.ads.adwords.axis.v201809.cm.VideoError.class, beansf, beandf);
    addBinding1("VideoError.Reason", com.google.api.ads.adwords.axis.v201809.cm.VideoErrorReason.class, enumsf, enumdf);
}

/**
 * Appends one XML-type binding to the parallel caches consumed by
 * {@link #createCall()}: the schema type's qualified name (in the AdWords
 * cm/v201809 namespace), the Java class it maps to, and the serializer and
 * deserializer factory classes used to (de)serialize it.
 *
 * @param localPart local part of the schema type's qualified name
 * @param cls       the Java class the schema type maps to
 * @param sf        serializer factory class (bean or enum)
 * @param df        deserializer factory class (bean or enum)
 */
private void addBinding1(java.lang.String localPart, java.lang.Class cls,
        java.lang.Class sf, java.lang.Class df) {
    cachedSerQNames.add(new javax.xml.namespace.QName(
            "https://adwords.google.com/api/adwords/cm/v201809", localPart));
    cachedSerClasses.add(cls);
    cachedSerFactories.add(sf);
    cachedDeserFactories.add(df);
}
/**
 * Creates and configures an Axis {@code Call} for this stub: applies any
 * cached session, credential, endpoint, timeout, port-name and property
 * settings, and — on the first call only — registers all cached type
 * mappings with the service's TypeMappingRegistry.
 *
 * @return a fully configured {@code org.apache.axis.client.Call}
 * @throws java.rmi.RemoteException (as an {@code AxisFault}) if the call
 *         object cannot be created or configured
 */
protected org.apache.axis.client.Call createCall() throws java.rmi.RemoteException {
try {
org.apache.axis.client.Call _call = super._createCall();
// Copy each cached stub setting onto the new call, skipping unset ones.
if (super.maintainSessionSet) {
_call.setMaintainSession(super.maintainSession);
}
if (super.cachedUsername != null) {
_call.setUsername(super.cachedUsername);
}
if (super.cachedPassword != null) {
_call.setPassword(super.cachedPassword);
}
if (super.cachedEndpoint != null) {
_call.setTargetEndpointAddress(super.cachedEndpoint);
}
if (super.cachedTimeout != null) {
_call.setTimeout(super.cachedTimeout);
}
if (super.cachedPortName != null) {
_call.setPortName(super.cachedPortName);
}
// Transfer every cached property onto the call.
java.util.Enumeration keys = super.cachedProperties.keys();
while (keys.hasMoreElements()) {
java.lang.String key = (java.lang.String) keys.nextElement();
_call.setProperty(key, super.cachedProperties.get(key));
}
// All the type mapping information is registered
// when the first call is made.
// The type mapping information is actually registered in
// the TypeMappingRegistry of the service, which
// is the reason why registration is only needed for the first call.
synchronized (this) {
if (firstCall()) {
// must set encoding style before registering serializers
_call.setEncodingStyle(null);
// The four cached lists are parallel arrays indexed together:
// class, QName, serializer factory, deserializer factory.
for (int i = 0; i < cachedSerFactories.size(); ++i) {
java.lang.Class cls = (java.lang.Class) cachedSerClasses.get(i);
javax.xml.namespace.QName qName =
(javax.xml.namespace.QName) cachedSerQNames.get(i);
java.lang.Object x = cachedSerFactories.get(i);
if (x instanceof Class) {
// Factory cached as a Class: Axis will instantiate it itself.
java.lang.Class sf = (java.lang.Class)
cachedSerFactories.get(i);
java.lang.Class df = (java.lang.Class)
cachedDeserFactories.get(i);
_call.registerTypeMapping(cls, qName, sf, df, false);
}
else if (x instanceof javax.xml.rpc.encoding.SerializerFactory) {
// Factory cached as a ready-made instance.
org.apache.axis.encoding.SerializerFactory sf = (org.apache.axis.encoding.SerializerFactory)
cachedSerFactories.get(i);
org.apache.axis.encoding.DeserializerFactory df = (org.apache.axis.encoding.DeserializerFactory)
cachedDeserFactories.get(i);
_call.registerTypeMapping(cls, qName, sf, df, false);
}
}
}
}
return _call;
}
catch (java.lang.Throwable _t) {
// Wrap any failure (including Errors) in an AxisFault for the caller.
throw new org.apache.axis.AxisFault("Failure trying to get the Call object", _t);
}
}
/**
 * Invokes the service's {@code get} operation with the given selector and
 * returns the resulting page.
 *
 * @param selector selector describing which entries to return
 * @return the {@code TrialAsyncErrorPage} produced by the service
 * @throws java.rmi.RemoteException on transport or SOAP-level failure
 * @throws com.google.api.ads.adwords.axis.v201809.cm.ApiException when the
 *         fault detail carries an {@code ApiException}
 */
public com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorPage get(com.google.api.ads.adwords.axis.v201809.cm.Selector selector) throws java.rmi.RemoteException, com.google.api.ads.adwords.axis.v201809.cm.ApiException {
    // A target endpoint must have been configured before any invocation.
    if (super.cachedEndpoint == null) {
        throw new org.apache.axis.NoEndPointException();
    }
    // Build and configure the SOAP call for operation #0 ("get").
    org.apache.axis.client.Call call = createCall();
    call.setOperation(_operations[0]);
    call.setUseSOAPAction(true);
    call.setSOAPActionURI("");
    call.setEncodingStyle(null);
    call.setProperty(org.apache.axis.client.Call.SEND_TYPE_ATTR, Boolean.FALSE);
    call.setProperty(org.apache.axis.AxisEngine.PROP_DOMULTIREFS, Boolean.FALSE);
    call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS);
    call.setOperationName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "get"));
    setRequestHeaders(call);
    setAttachments(call);
    try {
        java.lang.Object response = call.invoke(new java.lang.Object[] {selector});
        if (response instanceof java.rmi.RemoteException) {
            throw (java.rmi.RemoteException) response;
        }
        extractAttachments(call);
        try {
            // Try a direct cast first; fall back to Axis type conversion.
            return (com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorPage) response;
        } catch (java.lang.Exception castFailure) {
            return (com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorPage) org.apache.axis.utils.JavaUtils.convert(response, com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorPage.class);
        }
    } catch (org.apache.axis.AxisFault fault) {
        // Unwrap faults whose detail carries one of the declared exception
        // types; otherwise rethrow the fault as-is.
        java.lang.Object detail = fault.detail;
        if (detail instanceof java.rmi.RemoteException) {
            throw (java.rmi.RemoteException) detail;
        }
        if (detail instanceof com.google.api.ads.adwords.axis.v201809.cm.ApiException) {
            throw (com.google.api.ads.adwords.axis.v201809.cm.ApiException) detail;
        }
        throw fault;
    }
}
/**
 * Invokes the service's {@code query} operation with the given AWQL query
 * string and returns the resulting page.
 *
 * @param query the query string to send to the service
 * @return the {@code TrialAsyncErrorPage} produced by the service
 * @throws java.rmi.RemoteException on transport or SOAP-level failure
 * @throws com.google.api.ads.adwords.axis.v201809.cm.ApiException when the
 *         fault detail carries an {@code ApiException}
 */
public com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorPage query(java.lang.String query) throws java.rmi.RemoteException, com.google.api.ads.adwords.axis.v201809.cm.ApiException {
    // A target endpoint must have been configured before any invocation.
    if (super.cachedEndpoint == null) {
        throw new org.apache.axis.NoEndPointException();
    }
    // Build and configure the SOAP call for operation #1 ("query").
    org.apache.axis.client.Call call = createCall();
    call.setOperation(_operations[1]);
    call.setUseSOAPAction(true);
    call.setSOAPActionURI("");
    call.setEncodingStyle(null);
    call.setProperty(org.apache.axis.client.Call.SEND_TYPE_ATTR, Boolean.FALSE);
    call.setProperty(org.apache.axis.AxisEngine.PROP_DOMULTIREFS, Boolean.FALSE);
    call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS);
    call.setOperationName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/cm/v201809", "query"));
    setRequestHeaders(call);
    setAttachments(call);
    try {
        java.lang.Object response = call.invoke(new java.lang.Object[] {query});
        if (response instanceof java.rmi.RemoteException) {
            throw (java.rmi.RemoteException) response;
        }
        extractAttachments(call);
        try {
            // Try a direct cast first; fall back to Axis type conversion.
            return (com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorPage) response;
        } catch (java.lang.Exception castFailure) {
            return (com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorPage) org.apache.axis.utils.JavaUtils.convert(response, com.google.api.ads.adwords.axis.v201809.cm.TrialAsyncErrorPage.class);
        }
    } catch (org.apache.axis.AxisFault fault) {
        // Unwrap faults whose detail carries one of the declared exception
        // types; otherwise rethrow the fault as-is.
        java.lang.Object detail = fault.detail;
        if (detail instanceof java.rmi.RemoteException) {
            throw (java.rmi.RemoteException) detail;
        }
        if (detail instanceof com.google.api.ads.adwords.axis.v201809.cm.ApiException) {
            throw (com.google.api.ads.adwords.axis.v201809.cm.ApiException) detail;
        }
        throw fault;
    }
}
}
| |
/*
* Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.reflect.generics.reflectiveObjects;
import java.lang.annotation.*;
import java.lang.reflect.AnnotatedType;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.GenericDeclaration;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import sun.reflect.annotation.AnnotationSupport;
import sun.reflect.annotation.TypeAnnotationParser;
import sun.reflect.annotation.AnnotationType;
import sun.reflect.generics.factory.GenericsFactory;
import sun.reflect.generics.tree.FieldTypeSignature;
import sun.reflect.generics.visitor.Reifier;
import sun.reflect.misc.ReflectUtil;
/**
 * Implementation of the <tt>java.lang.reflect.TypeVariable</tt> interface
 * for core reflection.  Bounds are reified lazily from their parsed
 * signature ASTs on first access.
 */
public class TypeVariableImpl<D extends GenericDeclaration>
extends LazyReflectiveObjectGenerator implements TypeVariable<D> {
// The class, method or constructor that declared this type variable.
D genericDeclaration;
// The name of the type variable as it appears in source code.
private String name;
// upper bounds - evaluated lazily
private Type[] bounds;
// The ASTs for the bounds. We are required to evaluate the bounds
// lazily, so we store these at least until we are first asked
// for the bounds. This also neatly solves the
// problem with F-bounds - you can't reify them before the formal
// is defined.
private FieldTypeSignature[] boundASTs;
// constructor is private to enforce access through static factory
private TypeVariableImpl(D decl, String n, FieldTypeSignature[] bs,
GenericsFactory f) {
super(f);
genericDeclaration = decl;
name = n;
boundASTs = bs;
}
// Accessors
// accessor for ASTs for bounds. Must not be called after
// bounds have been evaluated, because we might throw the ASTs
// away (but that is not thread-safe, is it?)
private FieldTypeSignature[] getBoundASTs() {
// check that bounds were not evaluated yet
assert(bounds == null);
return boundASTs;
}
/**
 * Factory method.
 * @param decl - the reflective object that declared the type variable
 * that this method should create
 * @param name - the name of the type variable to be returned
 * @param bs - an array of ASTs representing the bounds for the type
 * variable to be created
 * @param f - a factory that can be used to manufacture reflective
 * objects that represent the bounds of this type variable
 * @return A type variable with name, bounds, declaration and factory
 * specified
 * @throws AssertionError if {@code decl} is not a Class, Method or
 * Constructor — the only kinds of GenericDeclaration that this
 * implementation accepts
 */
public static <T extends GenericDeclaration>
TypeVariableImpl<T> make(T decl, String name,
FieldTypeSignature[] bs,
GenericsFactory f) {
if (!((decl instanceof Class) ||
(decl instanceof Method) ||
(decl instanceof Constructor))) {
throw new AssertionError("Unexpected kind of GenericDeclaration" +
decl.getClass().toString());
}
return new TypeVariableImpl<T>(decl, name, bs, f);
}
/**
 * Returns an array of <tt>Type</tt> objects representing the
 * upper bound(s) of this type variable. Note that if no upper bound is
 * explicitly declared, the upper bound is <tt>Object</tt>.
 *
 * <p>For each upper bound B:
 * <ul>
 * <li>if B is a parameterized type or a type variable, it is created,
 * (see {@link java.lang.reflect.ParameterizedType} for the details of
 * the creation process for parameterized types).
 * <li>Otherwise, B is resolved.
 * </ul>
 *
 * @throws TypeNotPresentException if any of the
 * bounds refers to a non-existent type declaration
 * @throws MalformedParameterizedTypeException if any of the
 * bounds refer to a parameterized type that cannot be instantiated
 * for any reason
 * @return an array of Types representing the upper bound(s) of this
 * type variable
 */
public Type[] getBounds() {
// lazily initialize bounds if necessary
if (bounds == null) {
FieldTypeSignature[] fts = getBoundASTs(); // get AST
// allocate result array; note that
// keeping ts and bounds separate helps with threads
Type[] ts = new Type[fts.length];
// iterate over bound trees, reifying each in turn
for ( int j = 0; j < fts.length; j++) {
Reifier r = getReifier();
fts[j].accept(r);
ts[j] = r.getResult();
}
// cache result
bounds = ts;
// could throw away bound ASTs here; thread safety?
}
return bounds.clone(); // return cached bounds
}
/**
 * Returns the <tt>GenericDeclaration</tt> object representing the
 * generic declaration that declared this type variable.
 *
 * @return the generic declaration that declared this type variable.
 *
 * @since 1.5
 */
public D getGenericDeclaration(){
// Security check: verify the caller may access the declaring class or
// member before handing it out.
if (genericDeclaration instanceof Class)
ReflectUtil.checkPackageAccess((Class)genericDeclaration);
else if ((genericDeclaration instanceof Method) ||
(genericDeclaration instanceof Constructor))
ReflectUtil.conservativeCheckMemberAccess((Member)genericDeclaration);
else
throw new AssertionError("Unexpected kind of GenericDeclaration");
return genericDeclaration;
}
/**
 * Returns the name of this type variable, as it occurs in the source code.
 *
 * @return the name of this type variable, as it appears in the source code
 */
public String getName() { return name; }
// The string form of a type variable is simply its name.
public String toString() {return getName();}
// Two type variables are equal iff they have equal declarations and equal
// names; the o.getClass() check restricts equality to this exact
// implementation class so subclasses cannot break symmetry.
@Override
public boolean equals(Object o) {
if (o instanceof TypeVariable &&
o.getClass() == TypeVariableImpl.class) {
TypeVariable<?> that = (TypeVariable<?>) o;
GenericDeclaration thatDecl = that.getGenericDeclaration();
String thatName = that.getName();
return Objects.equals(genericDeclaration, thatDecl) &&
Objects.equals(name, thatName);
} else
return false;
}
// Combines the declaration's and the name's hash codes, consistent with
// equals() above.
@Override
public int hashCode() {
return genericDeclaration.hashCode() ^ name.hashCode();
}
// Implementations of AnnotatedElement methods.
// Returns the annotation of the given type present on this type variable,
// or null if there is none.  Only RUNTIME-retained annotations are
// visible (filtered in mapAnnotations below).
@SuppressWarnings("unchecked")
public <T extends Annotation> T getAnnotation(Class<T> annotationClass) {
Objects.requireNonNull(annotationClass);
// T is an Annotation type, the return value of get will be an annotation
return (T)mapAnnotations(getAnnotations()).get(annotationClass);
}
// For type variables the declared view is identical to getAnnotation():
// there are no inherited annotations to exclude.
public <T extends Annotation> T getDeclaredAnnotation(Class<T> annotationClass) {
Objects.requireNonNull(annotationClass);
return getAnnotation(annotationClass);
}
@Override
public <T extends Annotation> T[] getAnnotationsByType(Class<T> annotationClass) {
Objects.requireNonNull(annotationClass);
return AnnotationSupport.getDirectlyAndIndirectlyPresent(mapAnnotations(getAnnotations()), annotationClass);
}
@Override
public <T extends Annotation> T[] getDeclaredAnnotationsByType(Class<T> annotationClass) {
Objects.requireNonNull(annotationClass);
return getAnnotationsByType(annotationClass);
}
// Parses this variable's type annotations from its generic declaration.
// Throws AssertionError if this variable cannot be found among the
// declaration's type parameters (typeVarIndex() returned -1).
public Annotation[] getAnnotations() {
int myIndex = typeVarIndex();
if (myIndex < 0)
throw new AssertionError("Index must be non-negative.");
return TypeAnnotationParser.parseTypeVariableAnnotations(getGenericDeclaration(), myIndex);
}
public Annotation[] getDeclaredAnnotations() {
return getAnnotations();
}
public AnnotatedType[] getAnnotatedBounds() {
return TypeAnnotationParser.parseAnnotatedBounds(getBounds(),
getGenericDeclaration(),
typeVarIndex());
}
// NOTE(review): this constant appears unused within this class — looks
// like a candidate for removal, but confirm no subclass or reflective
// use before deleting.
private static final Annotation[] EMPTY_ANNOTATION_ARRAY = new Annotation[0];
// Helpers for annotation methods
// Position of this variable in its declaration's type-parameter list, or
// -1 if it is not found there (callers must handle -1).
private int typeVarIndex() {
TypeVariable<?>[] tVars = getGenericDeclaration().getTypeParameters();
int i = -1;
for (TypeVariable<?> v : tVars) {
i++;
if (equals(v))
return i;
}
return -1;
}
// Indexes the given annotations by annotation type, keeping only those
// with RUNTIME retention; a duplicate annotation type is a class-file
// format error.
private static Map<Class<? extends Annotation>, Annotation> mapAnnotations(Annotation[] annos) {
Map<Class<? extends Annotation>, Annotation> result =
new LinkedHashMap<>();
for (Annotation a : annos) {
Class<? extends Annotation> klass = a.annotationType();
AnnotationType type = AnnotationType.getInstance(klass);
if (type.retention() == RetentionPolicy.RUNTIME)
if (result.put(klass, a) != null)
throw new AnnotationFormatError("Duplicate annotation for class: "+klass+": " + a);
}
return result;
}
}
| |
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cloudsearch.model;
/**
* <p>
* The current status of the search domain.
* </p>
*/
public class DomainStatus {
/**
* An internally generated unique identifier for a domain.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 64<br/>
*/
private String domainId;
/**
* A string that represents the name of a domain. Domain names must be
* unique across the domains owned by an account within an AWS region.
* Domain names must start with a letter or number and can contain the
* following characters: a-z (lowercase), 0-9, and - (hyphen). Uppercase
* letters and underscores are not allowed.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>3 - 28<br/>
* <b>Pattern: </b>[a-z][a-z0-9\-]+<br/>
*/
private String domainName;
/**
* True if the search domain is created. It can take several minutes to
* initialize a domain when <a>CreateDomain</a> is called. Newly created
* search domains are returned from <a>DescribeDomains</a> with a false
* value for Created until domain creation is complete.
*/
private Boolean created;
/**
* True if the search domain has been deleted. The system must clean up
* resources dedicated to the search domain when <a>DeleteDomain</a> is
* called. Newly deleted search domains are returned from
* <a>DescribeDomains</a> with a true value for IsDeleted for several
* minutes until resource cleanup is complete.
*/
private Boolean deleted;
/**
* The number of documents that have been submitted to the domain and
* indexed.
* <p>
* <b>Constraints:</b><br/>
* <b>Range: </b>0 - <br/>
*/
private Integer numSearchableDocs;
/**
* The service endpoint for updating documents in a search domain.
*/
private ServiceEndpoint docService;
/**
* The service endpoint for requesting search results from a search
* domain.
*/
private ServiceEndpoint searchService;
/**
* True if <a>IndexDocuments</a> needs to be called to activate the
* current domain configuration.
*/
private Boolean requiresIndexDocuments;
/**
* True if processing is being done to activate the current domain
* configuration.
*/
private Boolean processing;
/**
* The instance type that is being used to process search requests.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>SearchInstance:t1.micro, SearchInstance:m1.small, SearchInstance:m1.large, SearchInstance:m2.xlarge
*/
private String searchInstanceType;
/**
* The number of partitions across which the search index is spread.
* <p>
* <b>Constraints:</b><br/>
* <b>Range: </b>1 - <br/>
*/
private Integer searchPartitionCount;
/**
* The number of search instances that are available to process search
* requests.
* <p>
* <b>Constraints:</b><br/>
* <b>Range: </b>1 - <br/>
*/
private Integer searchInstanceCount;
/**
 * Returns the internally generated unique identifier for the domain.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Length: </b>1 - 64<br/>
 *
 * @return the domain's internally generated unique identifier
 */
public String getDomainId() {
    return this.domainId;
}
/**
 * Sets the internally generated unique identifier for the domain.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Length: </b>1 - 64<br/>
 *
 * @param domainId the domain's internally generated unique identifier
 */
public void setDomainId(String domainId) {
    this.domainId = domainId;
}
/**
 * Sets the domain's internally generated unique identifier and returns
 * this object so that method calls can be chained together.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Length: </b>1 - 64<br/>
 *
 * @param domainId the domain's internally generated unique identifier
 * @return this {@code DomainStatus} instance, for call chaining
 */
public DomainStatus withDomainId(String domainId) {
    setDomainId(domainId);
    return this;
}
/**
 * Returns the name of the domain. Domain names are unique across the
 * domains owned by an account within an AWS region, must start with a
 * letter or number, and may contain only a-z (lowercase), 0-9, and -
 * (hyphen); uppercase letters and underscores are not allowed.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Length: </b>3 - 28<br/>
 * <b>Pattern: </b>[a-z][a-z0-9\-]+<br/>
 *
 * @return the name of the domain
 */
public String getDomainName() {
    return this.domainName;
}
/**
 * Sets the name of the domain. Domain names are unique across the domains
 * owned by an account within an AWS region, must start with a letter or
 * number, and may contain only a-z (lowercase), 0-9, and - (hyphen);
 * uppercase letters and underscores are not allowed.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Length: </b>3 - 28<br/>
 * <b>Pattern: </b>[a-z][a-z0-9\-]+<br/>
 *
 * @param domainName the name of the domain
 */
public void setDomainName(String domainName) {
    this.domainName = domainName;
}
/**
 * Sets the name of the domain and returns this object so that method
 * calls can be chained together. Domain names are unique across the
 * domains owned by an account within an AWS region, must start with a
 * letter or number, and may contain only a-z (lowercase), 0-9, and -
 * (hyphen); uppercase letters and underscores are not allowed.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Length: </b>3 - 28<br/>
 * <b>Pattern: </b>[a-z][a-z0-9\-]+<br/>
 *
 * @param domainName the name of the domain
 * @return this {@code DomainStatus} instance, for call chaining
 */
public DomainStatus withDomainName(String domainName) {
    setDomainName(domainName);
    return this;
}
/**
 * Indicates whether the search domain has finished being created.
 * Initialization after <a>CreateDomain</a> can take several minutes;
 * until it completes, <a>DescribeDomains</a> returns a false value for
 * Created.
 *
 * @return true if the search domain is created
 */
public Boolean isCreated() {
    return this.created;
}
/**
 * Sets whether the search domain has finished being created.
 * Initialization after <a>CreateDomain</a> can take several minutes;
 * until it completes, <a>DescribeDomains</a> returns a false value for
 * Created.
 *
 * @param created true if the search domain is created
 */
public void setCreated(Boolean created) {
    this.created = created;
}
/**
 * Sets whether the search domain has finished being created and returns
 * this object so that method calls can be chained together.
 * Initialization after <a>CreateDomain</a> can take several minutes;
 * until it completes, <a>DescribeDomains</a> returns a false value for
 * Created.
 *
 * @param created true if the search domain is created
 * @return this {@code DomainStatus} instance, for call chaining
 */
public DomainStatus withCreated(Boolean created) {
    setCreated(created);
    return this;
}
/**
* True if the search domain is created. It can take several minutes to
* initialize a domain when <a>CreateDomain</a> is called. Newly created
* search domains are returned from <a>DescribeDomains</a> with a false
* value for Created until domain creation is complete.
*
* @return True if the search domain is created. It can take several minutes to
* initialize a domain when <a>CreateDomain</a> is called. Newly created
* search domains are returned from <a>DescribeDomains</a> with a false
* value for Created until domain creation is complete.
*/
public Boolean getCreated() {
return created;
}
/**
 * Indicates whether the search domain has been deleted. After
 * <a>DeleteDomain</a> is called the system must clean up resources
 * dedicated to the domain; <a>DescribeDomains</a> keeps returning the
 * domain with IsDeleted set to true for several minutes until cleanup
 * finishes.
 *
 * @return True once the search domain has been deleted.
 */
public Boolean isDeleted() {
    return this.deleted;
}

/**
 * Sets whether the search domain has been deleted. See {@link #isDeleted()}.
 *
 * @param deleted True once the search domain has been deleted.
 */
public void setDeleted(Boolean deleted) {
    this.deleted = deleted;
}

/**
 * Fluent setter for the deleted flag. See {@link #isDeleted()}.
 *
 * @param deleted True once the search domain has been deleted.
 *
 * @return this instance, so calls can be chained.
 */
public DomainStatus withDeleted(Boolean deleted) {
    setDeleted(deleted);
    return this;
}

/**
 * Alias of {@link #isDeleted()} following the JavaBean getter convention.
 *
 * @return True once the search domain has been deleted.
 */
public Boolean getDeleted() {
    return this.deleted;
}
/**
 * The number of documents that have been submitted to the domain and
 * indexed.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Range: </b>0 - <br/>
 *
 * @return The number of submitted and indexed documents.
 */
public Integer getNumSearchableDocs() {
    return this.numSearchableDocs;
}

/**
 * Sets the number of documents that have been submitted to the domain
 * and indexed.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Range: </b>0 - <br/>
 *
 * @param numSearchableDocs The number of submitted and indexed documents.
 */
public void setNumSearchableDocs(Integer numSearchableDocs) {
    this.numSearchableDocs = numSearchableDocs;
}

/**
 * Fluent setter for the searchable-document count.
 * See {@link #getNumSearchableDocs()}.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Range: </b>0 - <br/>
 *
 * @param numSearchableDocs The number of submitted and indexed documents.
 *
 * @return this instance, so calls can be chained.
 */
public DomainStatus withNumSearchableDocs(Integer numSearchableDocs) {
    setNumSearchableDocs(numSearchableDocs);
    return this;
}
/**
 * The service endpoint for updating documents in a search domain.
 *
 * @return The document-update service endpoint.
 */
public ServiceEndpoint getDocService() {
    return this.docService;
}

/**
 * Sets the service endpoint for updating documents in a search domain.
 *
 * @param docService The document-update service endpoint.
 */
public void setDocService(ServiceEndpoint docService) {
    this.docService = docService;
}

/**
 * Fluent setter for the document-update endpoint.
 * See {@link #getDocService()}.
 *
 * @param docService The document-update service endpoint.
 *
 * @return this instance, so calls can be chained.
 */
public DomainStatus withDocService(ServiceEndpoint docService) {
    setDocService(docService);
    return this;
}
/**
 * The service endpoint for requesting search results from a search
 * domain.
 *
 * @return The search-request service endpoint.
 */
public ServiceEndpoint getSearchService() {
    return this.searchService;
}

/**
 * Sets the service endpoint for requesting search results from a search
 * domain.
 *
 * @param searchService The search-request service endpoint.
 */
public void setSearchService(ServiceEndpoint searchService) {
    this.searchService = searchService;
}

/**
 * Fluent setter for the search-request endpoint.
 * See {@link #getSearchService()}.
 *
 * @param searchService The search-request service endpoint.
 *
 * @return this instance, so calls can be chained.
 */
public DomainStatus withSearchService(ServiceEndpoint searchService) {
    setSearchService(searchService);
    return this;
}
/**
 * Indicates whether <a>IndexDocuments</a> needs to be called to activate
 * the current domain configuration.
 *
 * @return True when <a>IndexDocuments</a> must be called.
 */
public Boolean isRequiresIndexDocuments() {
    return this.requiresIndexDocuments;
}

/**
 * Sets whether <a>IndexDocuments</a> needs to be called to activate the
 * current domain configuration.
 *
 * @param requiresIndexDocuments True when <a>IndexDocuments</a> must be called.
 */
public void setRequiresIndexDocuments(Boolean requiresIndexDocuments) {
    this.requiresIndexDocuments = requiresIndexDocuments;
}

/**
 * Fluent setter for the requires-index-documents flag.
 * See {@link #isRequiresIndexDocuments()}.
 *
 * @param requiresIndexDocuments True when <a>IndexDocuments</a> must be called.
 *
 * @return this instance, so calls can be chained.
 */
public DomainStatus withRequiresIndexDocuments(Boolean requiresIndexDocuments) {
    setRequiresIndexDocuments(requiresIndexDocuments);
    return this;
}

/**
 * Alias of {@link #isRequiresIndexDocuments()} following the JavaBean
 * getter convention.
 *
 * @return True when <a>IndexDocuments</a> must be called.
 */
public Boolean getRequiresIndexDocuments() {
    return this.requiresIndexDocuments;
}
/**
 * Indicates whether processing is being done to activate the current
 * domain configuration.
 *
 * @return True while activation processing is in progress.
 */
public Boolean isProcessing() {
    return this.processing;
}

/**
 * Sets whether processing is being done to activate the current domain
 * configuration.
 *
 * @param processing True while activation processing is in progress.
 */
public void setProcessing(Boolean processing) {
    this.processing = processing;
}

/**
 * Fluent setter for the processing flag. See {@link #isProcessing()}.
 *
 * @param processing True while activation processing is in progress.
 *
 * @return this instance, so calls can be chained.
 */
public DomainStatus withProcessing(Boolean processing) {
    setProcessing(processing);
    return this;
}

/**
 * Alias of {@link #isProcessing()} following the JavaBean getter
 * convention.
 *
 * @return True while activation processing is in progress.
 */
public Boolean getProcessing() {
    return this.processing;
}
/**
 * The instance type that is being used to process search requests.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Allowed Values: </b>SearchInstance:t1.micro, SearchInstance:m1.small, SearchInstance:m1.large, SearchInstance:m2.xlarge
 *
 * @return The search instance type.
 *
 * @see SearchInstanceType
 */
public String getSearchInstanceType() {
    return this.searchInstanceType;
}

/**
 * Sets the instance type that is being used to process search requests.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Allowed Values: </b>SearchInstance:t1.micro, SearchInstance:m1.small, SearchInstance:m1.large, SearchInstance:m2.xlarge
 *
 * @param searchInstanceType The search instance type.
 *
 * @see SearchInstanceType
 */
public void setSearchInstanceType(String searchInstanceType) {
    this.searchInstanceType = searchInstanceType;
}

/**
 * Fluent setter for the search instance type.
 * See {@link #getSearchInstanceType()}.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Allowed Values: </b>SearchInstance:t1.micro, SearchInstance:m1.small, SearchInstance:m1.large, SearchInstance:m2.xlarge
 *
 * @param searchInstanceType The search instance type.
 *
 * @return this instance, so calls can be chained.
 *
 * @see SearchInstanceType
 */
public DomainStatus withSearchInstanceType(String searchInstanceType) {
    setSearchInstanceType(searchInstanceType);
    return this;
}

/**
 * Enum overload of {@link #setSearchInstanceType(String)}; stores the
 * enum's string form.
 *
 * @param searchInstanceType The search instance type (must not be null).
 *
 * @see SearchInstanceType
 */
public void setSearchInstanceType(SearchInstanceType searchInstanceType) {
    setSearchInstanceType(searchInstanceType.toString());
}

/**
 * Enum overload of {@link #withSearchInstanceType(String)}; stores the
 * enum's string form.
 *
 * @param searchInstanceType The search instance type (must not be null).
 *
 * @return this instance, so calls can be chained.
 *
 * @see SearchInstanceType
 */
public DomainStatus withSearchInstanceType(SearchInstanceType searchInstanceType) {
    setSearchInstanceType(searchInstanceType.toString());
    return this;
}
/**
 * The number of partitions across which the search index is spread.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Range: </b>1 - <br/>
 *
 * @return The search index partition count.
 */
public Integer getSearchPartitionCount() {
    return this.searchPartitionCount;
}

/**
 * Sets the number of partitions across which the search index is spread.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Range: </b>1 - <br/>
 *
 * @param searchPartitionCount The search index partition count.
 */
public void setSearchPartitionCount(Integer searchPartitionCount) {
    this.searchPartitionCount = searchPartitionCount;
}

/**
 * Fluent setter for the partition count.
 * See {@link #getSearchPartitionCount()}.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Range: </b>1 - <br/>
 *
 * @param searchPartitionCount The search index partition count.
 *
 * @return this instance, so calls can be chained.
 */
public DomainStatus withSearchPartitionCount(Integer searchPartitionCount) {
    setSearchPartitionCount(searchPartitionCount);
    return this;
}
/**
 * The number of search instances that are available to process search
 * requests.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Range: </b>1 - <br/>
 *
 * @return The number of available search instances.
 */
public Integer getSearchInstanceCount() {
    return this.searchInstanceCount;
}

/**
 * Sets the number of search instances that are available to process
 * search requests.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Range: </b>1 - <br/>
 *
 * @param searchInstanceCount The number of available search instances.
 */
public void setSearchInstanceCount(Integer searchInstanceCount) {
    this.searchInstanceCount = searchInstanceCount;
}

/**
 * Fluent setter for the search instance count.
 * See {@link #getSearchInstanceCount()}.
 * <p>
 * <b>Constraints:</b><br/>
 * <b>Range: </b>1 - <br/>
 *
 * @param searchInstanceCount The number of available search instances.
 *
 * @return this instance, so calls can be chained.
 */
public DomainStatus withSearchInstanceCount(Integer searchInstanceCount) {
    setSearchInstanceCount(searchInstanceCount);
    return this;
}
/**
 * Returns a string representation of this object; useful for testing and
 * debugging. Only non-null properties are included, each followed by a
 * trailing ", " separator (matching the generated-SDK format).
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Chained appends avoid the intermediate strings created by
    // concatenation inside append(); the output is byte-identical.
    StringBuilder sb = new StringBuilder("{");
    if (domainId != null) sb.append("DomainId: ").append(domainId).append(", ");
    if (domainName != null) sb.append("DomainName: ").append(domainName).append(", ");
    if (created != null) sb.append("Created: ").append(created).append(", ");
    if (deleted != null) sb.append("Deleted: ").append(deleted).append(", ");
    if (numSearchableDocs != null) sb.append("NumSearchableDocs: ").append(numSearchableDocs).append(", ");
    if (docService != null) sb.append("DocService: ").append(docService).append(", ");
    if (searchService != null) sb.append("SearchService: ").append(searchService).append(", ");
    if (requiresIndexDocuments != null) sb.append("RequiresIndexDocuments: ").append(requiresIndexDocuments).append(", ");
    if (processing != null) sb.append("Processing: ").append(processing).append(", ");
    if (searchInstanceType != null) sb.append("SearchInstanceType: ").append(searchInstanceType).append(", ");
    if (searchPartitionCount != null) sb.append("SearchPartitionCount: ").append(searchPartitionCount).append(", ");
    if (searchInstanceCount != null) sb.append("SearchInstanceCount: ").append(searchInstanceCount).append(", ");
    return sb.append("}").toString();
}
/**
 * Computes a hash over all properties, using 0 for null values, with the
 * conventional 31-based accumulation (consistent with {@link #equals}).
 */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + safeHash(getDomainId());
    result = prime * result + safeHash(getDomainName());
    result = prime * result + safeHash(isCreated());
    result = prime * result + safeHash(isDeleted());
    result = prime * result + safeHash(getNumSearchableDocs());
    result = prime * result + safeHash(getDocService());
    result = prime * result + safeHash(getSearchService());
    result = prime * result + safeHash(isRequiresIndexDocuments());
    result = prime * result + safeHash(isProcessing());
    result = prime * result + safeHash(getSearchInstanceType());
    result = prime * result + safeHash(getSearchPartitionCount());
    result = prime * result + safeHash(getSearchInstanceCount());
    return result;
}

/** Null-safe hashCode: a null value contributes 0. */
private static int safeHash(Object value) {
    return (value == null) ? 0 : value.hashCode();
}
/**
 * Two DomainStatus objects are equal when every property is either null
 * in both or equal in both (consistent with {@link #hashCode}).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) return true;
    // instanceof is false for null, covering the original null check.
    if (!(obj instanceof DomainStatus)) return false;
    DomainStatus other = (DomainStatus) obj;
    return bothEqual(other.getDomainId(), this.getDomainId())
        && bothEqual(other.getDomainName(), this.getDomainName())
        && bothEqual(other.isCreated(), this.isCreated())
        && bothEqual(other.isDeleted(), this.isDeleted())
        && bothEqual(other.getNumSearchableDocs(), this.getNumSearchableDocs())
        && bothEqual(other.getDocService(), this.getDocService())
        && bothEqual(other.getSearchService(), this.getSearchService())
        && bothEqual(other.isRequiresIndexDocuments(), this.isRequiresIndexDocuments())
        && bothEqual(other.isProcessing(), this.isProcessing())
        && bothEqual(other.getSearchInstanceType(), this.getSearchInstanceType())
        && bothEqual(other.getSearchPartitionCount(), this.getSearchPartitionCount())
        && bothEqual(other.getSearchInstanceCount(), this.getSearchInstanceCount());
}

/** Null-safe equality: true when both references are null or equal. */
private static boolean bothEqual(Object a, Object b) {
    return (a == b) || (a != null && a.equals(b));
}
}
| |
/*
* ***********************************************************************
Copyright [2011] [PagSeguro Internet Ltda.]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
* ***********************************************************************
*/
package br.com.uol.pagseguro.domain;
import java.util.ArrayList;
import java.util.List;
import br.com.uol.pagseguro.enums.DocumentType;
import br.com.uol.pagseguro.helper.PagSeguroUtil;
/**
 * Represents the party on the transaction that is sending the money
 */
public class Sender {

    /** Sender name (stored with extra spaces removed) */
    private String name;

    /** Sender e-mail */
    private String email;

    /** Sender born date (free-form string; not validated here) */
    private String bornDate;

    /** Sender phone */
    private Phone phone;

    /** Sender documents */
    private List<SenderDocument> documents;

    /** Sender hash */
    private String hash;

    /** Sender ip */
    private String ip;

    /**
     * Initializes a new instance of the Sender class
     */
    public Sender() {
    }

    /**
     * Initializes a new instance of the Sender class
     *
     * @param name sender name; extra spaces are removed
     * @param email sender e-mail
     */
    public Sender(String name, String email) {
        this.name = PagSeguroUtil.removeExtraSpaces(name);
        this.email = email;
    }

    /**
     * Initializes a new instance of the Sender class
     *
     * @param name sender name; extra spaces are removed
     * @param email sender e-mail
     * @param phone sender phone
     */
    public Sender(String name, String email, Phone phone) {
        // Chain to the two-argument constructor so the name-normalization
        // logic lives in exactly one place.
        this(name, email);
        this.phone = phone;
    }

    /**
     * Initializes a new instance of the Sender class
     *
     * @param name sender name; extra spaces are removed
     * @param email sender e-mail
     * @param phone sender phone
     * @param document document added to the sender documents list
     */
    public Sender(String name, String email, Phone phone, SenderDocument document) {
        this(name, email, phone);
        addDocument(document);
    }

    /**
     * Initializes a new instance of the Sender class
     *
     * @param name sender name; extra spaces are removed
     * @param email sender e-mail
     * @param phone sender phone
     * @param document document added to the sender documents list
     * @param bornDate sender born date
     */
    public Sender(String name, String email, Phone phone, SenderDocument document, String bornDate) {
        this(name, email, phone, document);
        this.bornDate = bornDate;
    }

    /**
     * Initializes a new instance of the Sender class
     *
     * @param name sender name; extra spaces are removed
     * @param email sender e-mail
     * @param phone sender phone
     * @param bornDate sender born date
     */
    public Sender(String name, String email, Phone phone, String bornDate) {
        this(name, email, phone);
        this.bornDate = bornDate;
    }

    /**
     * @return the sender name
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the sender name
     *
     * @param name sender name; extra spaces are removed
     */
    public void setName(String name) {
        this.name = PagSeguroUtil.removeExtraSpaces(name);
    }

    /**
     * @return the sender e-mail
     */
    public String getEmail() {
        return email;
    }

    /**
     * Sets the sender e-mail
     *
     * @param email sender e-mail
     */
    public void setEmail(String email) {
        this.email = email;
    }

    /**
     * @return the sender born date
     */
    public String getBornDate() {
        return bornDate;
    }

    /**
     * Sets the sender born date
     *
     * @param bornDate sender born date
     */
    public void setBornDate(String bornDate) {
        this.bornDate = bornDate;
    }

    /**
     * Returns the sender phone, lazily creating an empty Phone on first
     * access so callers can populate it directly.
     *
     * @return the sender phone, never null
     */
    public Phone getPhone() {
        if (phone == null) {
            phone = new Phone();
        }
        return phone;
    }

    /**
     * Sets the sender phone
     *
     * @param phone sender phone
     */
    public void setPhone(Phone phone) {
        this.phone = phone;
    }

    /**
     * Gets the sender documents list, lazily creating it on first access.
     *
     * @return the sender documents list, never null
     */
    public List<SenderDocument> getDocuments() {
        if (documents == null) {
            documents = new ArrayList<SenderDocument>();
        }
        return documents;
    }

    /**
     * Sets the sender documents list
     *
     * @param documents sender documents list
     */
    public void setDocuments(List<SenderDocument> documents) {
        this.documents = documents;
    }

    /**
     * Adds a document to the sender documents list
     *
     * @param document document to add
     */
    public void addDocument(SenderDocument document) {
        getDocuments().add(document);
    }

    /**
     * Adds a document to the sender documents list
     *
     * @param type document type
     * @param value document value
     */
    public void addDocument(DocumentType type, String value) {
        addDocument(new SenderDocument(type, value));
    }

    /**
     * @return the sender hash
     */
    public String getHash() {
        return hash;
    }

    /**
     * Sets the sender hash
     *
     * @param hash sender hash
     */
    public void setHash(String hash) {
        this.hash = hash;
    }

    /**
     * @return the sender ip
     */
    public String getIp() {
        return ip;
    }

    /**
     * Sets the sender ip
     *
     * @param ip sender ip
     */
    public void setIp(String ip) {
        this.ip = ip;
    }

    /**
     * Debug string; hash and ip are not included in the output.
     */
    @Override
    public String toString() {
        return "Sender [name=" + name + ", email=" + email + ", phone=" + phone + ", documents=" + documents
                + ", bornDate=" + bornDate + "]";
    }
}
| |
/**
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sangupta.colors.extract.android;
import java.util.HashMap;
import java.util.Locale;
/**
* The Color class defines methods for creating and converting color ints.
* Colors are represented as packed ints, made up of 4 bytes: alpha, red, green,
* blue. The values are unpremultiplied, meaning any transparency is stored
* solely in the alpha component, and not in the color components. The
* components are stored as follows (alpha << 24) | (red << 16) |
* (green << 8) | blue. Each component ranges between 0..255 with 0
* meaning no contribution for that component, and 255 meaning 100%
* contribution. Thus opaque-black would be 0xFF000000 (100% opaque but no
* contributions from red, green, or blue), and opaque-white would be 0xFFFFFFFF
*
* @author sangupta
* @since 1.0.0
*/
public class Color {
// Common packed ARGB color constants. All are fully opaque (alpha = 0xFF)
// except TRANSPARENT, which is all-zero (alpha = 0).
public static final int BLACK = 0xFF000000;
public static final int DKGRAY = 0xFF444444;
public static final int GRAY = 0xFF888888;
public static final int LTGRAY = 0xFFCCCCCC;
public static final int WHITE = 0xFFFFFFFF;
public static final int RED = 0xFFFF0000;
public static final int GREEN = 0xFF00FF00;
public static final int BLUE = 0xFF0000FF;
public static final int YELLOW = 0xFFFFFF00;
public static final int CYAN = 0xFF00FFFF;
public static final int MAGENTA = 0xFFFF00FF;
public static final int TRANSPARENT = 0;
/**
 * Extracts the alpha component (bits 24-31) of a packed ARGB color int.
 *
 * @param color packed ARGB color
 * @return alpha in the range [0, 255]
 */
public static int alpha(int color) {
    // Masking after an arithmetic shift is equivalent to (color >>> 24).
    return (color >> 24) & 0xFF;
}
/**
 * Extracts the red component (bits 16-23) of a packed ARGB color int.
 *
 * @param color packed ARGB color
 * @return red in the range [0, 255]
 */
public static int red(int color) {
    return (color >>> 16) & 0xFF;
}
/**
 * Extracts the green component (bits 8-15) of a packed ARGB color int.
 *
 * @param color packed ARGB color
 * @return green in the range [0, 255]
 */
public static int green(int color) {
    return (color >>> 8) & 0xFF;
}
/**
 * Extracts the blue component (bits 0-7) of a packed ARGB color int.
 *
 * @param color packed ARGB color
 * @return blue in the range [0, 255]
 */
public static int blue(int color) {
    return color & 0x000000FF;
}
/**
 * Packs red, green and blue components into a fully-opaque ARGB color
 * int (alpha is implicitly 255). Component values should be [0..255];
 * no range check is performed, so out-of-range inputs yield an
 * undefined color.
 *
 * @param red Red component [0..255] of the color
 * @param green Green component [0..255] of the color
 * @param blue Blue component [0..255] of the color
 */
public static int rgb(int red, int green, int blue) {
    // 0xFF000000 == (0xFF << 24): force full alpha.
    return 0xFF000000 | (red << 16) | (green << 8) | blue;
}
/**
 * Packs alpha, red, green and blue components into an ARGB color int.
 * Component values should be [0..255]; no range check is performed, so
 * out-of-range inputs yield an undefined color.
 *
 * @param alpha Alpha component [0..255] of the color
 * @param red Red component [0..255] of the color
 * @param green Green component [0..255] of the color
 * @param blue Blue component [0..255] of the color
 */
public static int argb(int alpha, int red, int green, int blue) {
    // OR is commutative, so assembling low-to-high is equivalent.
    return blue | (green << 8) | (red << 16) | (alpha << 24);
}
/**
 * Returns the hue component of a color int (HSV model).
 *
 * <b>Pending API council</b>
 *
 * @return A value between 0.0f and 1.0f
 */
public static float hue(int color) {
    final int r = (color >> 16) & 0xFF;
    final int g = (color >> 8) & 0xFF;
    final int b = color & 0xFF;
    final int max = Math.max(b, Math.max(r, g));
    final int min = Math.min(b, Math.min(r, g));
    if (max == min) {
        // Achromatic (gray): hue is undefined, report 0 by convention.
        return 0;
    }
    final float range = max - min;
    final float rc = (max - r) / range;
    final float gc = (max - g) / range;
    final float bc = (max - b) / range;
    float h;
    if (max == r) {
        h = bc - gc;
    } else if (max == g) {
        h = 2 + rc - bc;
    } else {
        h = 4 + gc - rc;
    }
    // Scale the sextant value into [0, 1), wrapping negatives.
    h /= 6.f;
    return (h < 0) ? h + 1 : h;
}
/**
 * Returns the saturation component of a color int (HSV model).
 *
 * <b>Pending API council</b>
 *
 * @return A value between 0.0f and 1.0f
 */
public static float saturation(int color) {
    final int r = (color >> 16) & 0xFF;
    final int g = (color >> 8) & 0xFF;
    final int b = color & 0xFF;
    final int max = Math.max(b, Math.max(r, g));
    final int min = Math.min(b, Math.min(r, g));
    // max == min covers black (max == 0), so no division by zero below.
    return (max == min) ? 0 : (max - min) / (float) max;
}
/**
 * Returns the brightness (value) component of a color int (HSV model).
 *
 * <b>Pending API council</b>
 *
 * @return A value between 0.0f and 1.0f
 */
public static float brightness(int color) {
    final int r = (color >> 16) & 0xFF;
    final int g = (color >> 8) & 0xFF;
    final int b = color & 0xFF;
    // Brightness is the largest channel, normalized to [0, 1].
    return Math.max(b, Math.max(r, g)) / 255.f;
}
/**
 * Parse the color string, and return the corresponding color-int. If the
 * string cannot be parsed, throws an IllegalArgumentException. Supported
 * formats are #RRGGBB, #AARRGGBB, and the named colors in the internal
 * map: 'red', 'blue', 'green', 'black', 'white', 'gray', 'cyan',
 * 'magenta', 'yellow', 'lightgray', 'darkgray', 'grey', 'lightgrey',
 * 'darkgrey', 'aqua', 'fuchsia', 'lime', 'maroon', 'navy', 'olive',
 * 'purple', 'silver', 'teal'. Name lookup is case-insensitive.
 */
public static int parseColor(String colorString) {
    if (colorString.charAt(0) == '#') {
        // Parse as long so #ffXXXXXX values do not overflow an int.
        long parsed = Long.parseLong(colorString.substring(1), 16);
        switch (colorString.length()) {
        case 7:
            // #RRGGBB: no alpha supplied, force fully opaque.
            parsed |= 0x00000000ff000000;
            break;
        case 9:
            // #AARRGGBB: alpha supplied explicitly.
            break;
        default:
            throw new IllegalArgumentException("Unknown color");
        }
        return (int) parsed;
    }
    Integer named = sColorNameMap.get(colorString.toLowerCase(Locale.ROOT));
    if (named != null) {
        return named;
    }
    throw new IllegalArgumentException("Unknown color");
}
/**
 * Convert RGB components to HSV. hsv[0] is Hue [0 .. 360), hsv[1] is
 * Saturation [0...1], hsv[2] is Value [0...1].
 * <p>
 * Implemented in pure Java: the previous implementation delegated to
 * {@code nativeRGBToHSV}, but this class never calls
 * {@code System.loadLibrary}, so every invocation would fail with
 * {@code UnsatisfiedLinkError}. The math mirrors the sibling
 * {@link #hue}, {@link #saturation} and {@link #brightness} methods,
 * with hue scaled to degrees.
 *
 * @param red red component value [0..255]
 * @param green green component value [0..255]
 * @param blue blue component value [0..255]
 * @param hsv 3 element array which holds the resulting HSV components.
 */
public static void RGBToHSV(int red, int green, int blue, float hsv[]) {
    if (hsv.length < 3) {
        throw new RuntimeException("3 components required for hsv");
    }
    final int max = Math.max(blue, Math.max(red, green));
    final int min = Math.min(blue, Math.min(red, green));
    final int delta = max - min;
    float h;
    if (delta == 0) {
        // Achromatic: hue is undefined, use 0 by convention.
        h = 0f;
    } else {
        final float range = delta;
        if (max == red) {
            h = ((max - blue) / range) - ((max - green) / range);
        } else if (max == green) {
            h = 2f + ((max - red) / range) - ((max - blue) / range);
        } else {
            h = 4f + ((max - green) / range) - ((max - red) / range);
        }
        h *= 60f;  // sextant -> degrees
        if (h < 0f) {
            h += 360f;
        }
    }
    hsv[0] = h;
    hsv[1] = (max == 0) ? 0f : delta / (float) max;
    hsv[2] = max / 255f;
}
/**
* Convert the argb color to its HSV components. hsv[0] is Hue [0 .. 360) hsv[1]
* is Saturation [0...1] hsv[2] is Value [0...1]
*
* @param color the argb color to convert. The alpha component is ignored.
* @param hsv 3 element array which holds the resulting HSV components.
*/
public static void colorToHSV(int color, float hsv[]) {
    // Unpack the packed ARGB int into its RGB channels; the alpha byte
    // (bits 24-31) is deliberately ignored.
    final int red = (color >> 16) & 0xFF;
    final int green = (color >> 8) & 0xFF;
    final int blue = color & 0xFF;
    RGBToHSV(red, green, blue, hsv);
}
/**
* Convert HSV components to an ARGB color. Alpha set to 0xFF. hsv[0] is Hue [0
* .. 360) hsv[1] is Saturation [0...1] hsv[2] is Value [0...1] If hsv values
* are out of range, they are pinned.
*
* @param hsv 3 element array which holds the input HSV components.
* @return the resulting argb color
*/
public static int HSVToColor(float hsv[]) {
    // Delegate to the two-argument overload with a fully opaque alpha (0xFF).
    return HSVToColor(0xFF, hsv);
}
/**
* Convert HSV components to an ARGB color. The alpha component is passed
* through unchanged. hsv[0] is Hue [0 .. 360) hsv[1] is Saturation [0...1]
* hsv[2] is Value [0...1] If hsv values are out of range, they are pinned.
*
* @param alpha the alpha component of the returned argb color.
* @param hsv 3 element array which holds the input HSV components.
* @return the resulting argb color
*/
public static int HSVToColor(int alpha, float hsv[]) {
    // The native converter reads hsv[0..2]; validate up front so callers
    // get a clear error instead of an out-of-bounds read.
    final boolean hasAllComponents = hsv.length >= 3;
    if (!hasAllComponents) {
        throw new RuntimeException("3 components required for hsv");
    }
    return nativeHSVToColor(alpha, hsv);
}
// JNI entry points performing the actual RGB <-> HSV math.
private static native void nativeRGBToHSV(int red, int greed, int blue, float hsv[]);
private static native int nativeHSVToColor(int alpha, float hsv[]);

// Lookup table backing parseColor(String) for named colors. Keys are
// lower-case names; values are fully opaque (0xFF alpha) ARGB ints.
// Several CSS synonyms ("grey"/"gray", "aqua"/cyan value) map to the
// same color.
private static final HashMap<String, Integer> sColorNameMap;
static {
    sColorNameMap = new HashMap<String, Integer>();
    sColorNameMap.put("black", BLACK);
    sColorNameMap.put("darkgray", DKGRAY);
    sColorNameMap.put("gray", GRAY);
    sColorNameMap.put("lightgray", LTGRAY);
    sColorNameMap.put("white", WHITE);
    sColorNameMap.put("red", RED);
    sColorNameMap.put("green", GREEN);
    sColorNameMap.put("blue", BLUE);
    sColorNameMap.put("yellow", YELLOW);
    sColorNameMap.put("cyan", CYAN);
    sColorNameMap.put("magenta", MAGENTA);
    sColorNameMap.put("aqua", 0xFF00FFFF);
    sColorNameMap.put("fuchsia", 0xFFFF00FF);
    sColorNameMap.put("darkgrey", DKGRAY);
    sColorNameMap.put("grey", GRAY);
    sColorNameMap.put("lightgrey", LTGRAY);
    sColorNameMap.put("lime", 0xFF00FF00);
    sColorNameMap.put("maroon", 0xFF800000);
    sColorNameMap.put("navy", 0xFF000080);
    sColorNameMap.put("olive", 0xFF808000);
    sColorNameMap.put("purple", 0xFF800080);
    sColorNameMap.put("silver", 0xFFC0C0C0);
    sColorNameMap.put("teal", 0xFF008080);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.io.IOException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException;
import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
import org.apache.hadoop.hdfs.server.common.Storage.StorageState;
import org.apache.hadoop.util.StringUtils;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
/**
 * Extension of FSImage for the backup node.
 * This class handles the setup of the journaling
 * spool on the backup namenode.
 */
@InterfaceAudience.Private
public class BackupImage extends FSImage {
    /** Backup input stream for loading edits into memory */
    private final EditLogBackupInputStream backupInputStream =
            new EditLogBackupInputStream("Data from remote NameNode");

    /**
     * Current state of the BackupNode. The BackupNode's state
     * transitions are as follows:
     *
     * Initial: DROP_UNTIL_NEXT_ROLL
     * - Transitions to JOURNAL_ONLY the next time the log rolls
     * - Transitions to IN_SYNC in convergeJournalSpool
     * - Transitions back to JOURNAL_ONLY if the log rolls while
     *   stopApplyingOnNextRoll is true.
     */
    volatile BNState bnState;

    /** The states the BackupNode's view of the namespace can be in. */
    enum BNState {
        /**
         * Edits from the NN should be dropped. On the next log roll,
         * transition to JOURNAL_ONLY state
         */
        DROP_UNTIL_NEXT_ROLL,
        /**
         * Edits from the NN should be written to the local edits log
         * but not applied to the namespace.
         */
        JOURNAL_ONLY,
        /**
         * Edits should be written to the local edits log and applied
         * to the local namespace.
         */
        IN_SYNC;
    }

    /**
     * Flag to indicate that the next time the NN rolls, the BN
     * should transition to JOURNAL_ONLY state.
     * {@see #freezeNamespaceAtNextRoll()}
     */
    private boolean stopApplyingEditsOnNextRoll = false;

    /** Namespace that received edits are applied to; set at most once. */
    private FSNamesystem namesystem;

    // NOTE(review): not referenced anywhere in this class as shown here;
    // presumably consumed by a superclass or configuration path -- verify
    // before removing.
    private int quotaInitThreads;

    /**
     * Construct a backup image.
     * @param conf Configuration
     * @throws IOException if storage cannot be initialised.
     */
    BackupImage(Configuration conf) throws IOException {
        super(conf);
        storage.setDisablePreUpgradableLayoutCheck(true);
        bnState = BNState.DROP_UNTIL_NEXT_ROLL;
    }

    /** @return the namesystem edits are applied to (null until set). */
    synchronized FSNamesystem getNamesystem() {
        return namesystem;
    }

    /** Set the namesystem; only the first non-null assignment takes effect. */
    synchronized void setNamesystem(FSNamesystem fsn) {
        // Avoids overriding this.namesystem object
        if (namesystem == null) {
            this.namesystem = fsn;
        }
    }

    /**
     * Analyze backup storage directories for consistency.<br>
     * Recover from incomplete checkpoints if required.<br>
     * Read VERSION and fstime files if exist.<br>
     * Do not load image or edits.
     *
     * @throws IOException if the node should shutdown.
     */
    void recoverCreateRead() throws IOException {
        for (Iterator<StorageDirectory> it = storage.dirIterator(); it.hasNext();) {
            StorageDirectory sd = it.next();
            StorageState curState;
            try {
                curState = sd.analyzeStorage(HdfsServerConstants.StartupOption.REGULAR, storage);
                // sd is locked but not opened
                switch(curState) {
                case NON_EXISTENT:
                    // fail if any of the configured storage dirs are inaccessible
                    throw new InconsistentFSStateException(sd.getRoot(),
                            "checkpoint directory does not exist or is not accessible.");
                case NOT_FORMATTED:
                    // for backup node all directories may be unformatted initially
                    LOG.info("Storage directory " + sd.getRoot() + " is not formatted.");
                    LOG.info("Formatting ...");
                    sd.clearDirectory(); // create empty current
                    break;
                case NORMAL:
                    break;
                default:  // recovery is possible
                    sd.doRecover(curState);
                }
                if(curState != StorageState.NOT_FORMATTED) {
                    // read and verify consistency with other directories
                    storage.readProperties(sd);
                }
            } catch(IOException ioe) {
                // release the lock before propagating so other processes can
                // access the directory
                sd.unlock();
                throw ioe;
            }
        }
    }

    /**
     * Receive a batch of edits from the NameNode.
     *
     * Depending on bnState, different actions are taken. See
     * {@link BackupImage.BNState}
     *
     * @param firstTxId first txid in batch
     * @param numTxns number of transactions
     * @param data serialized journal records.
     * @throws IOException
     * @see #convergeJournalSpool()
     */
    synchronized void journal(long firstTxId, int numTxns, byte[] data) throws IOException {
        if (LOG.isTraceEnabled()) {
            LOG.trace("Got journal, " +
                    "state = " + bnState +
                    "; firstTxId = " + firstTxId +
                    "; numTxns = " + numTxns);
        }
        switch(bnState) {
        case DROP_UNTIL_NEXT_ROLL:
            // not caught up yet; discard without journaling
            return;
        case IN_SYNC:
            // update NameSpace in memory
            applyEdits(firstTxId, numTxns, data);
            break;
        case JOURNAL_ONLY:
            // fall through to journaling below without applying
            break;
        default:
            throw new AssertionError("Unhandled state: " + bnState);
        }
        // write to BN's local edit log.
        editLog.journal(firstTxId, numTxns, data);
    }

    /**
     * Apply the batch of edits to the local namespace.
     * The batch must start exactly at lastAppliedTxId + 1; after loading,
     * quota counts are recomputed under the namesystem write lock.
     */
    private synchronized void applyEdits(long firstTxId, int numTxns, byte[] data)
            throws IOException {
        Preconditions.checkArgument(firstTxId == lastAppliedTxId + 1,
                "Received txn batch starting at %s but expected %s",
                firstTxId, lastAppliedTxId + 1);
        assert backupInputStream.length() == 0 : "backup input stream is not empty";
        try {
            if (LOG.isTraceEnabled()) {
                LOG.trace("data:" + StringUtils.byteToHexString(data));
            }
            FSEditLogLoader logLoader =
                    new FSEditLogLoader(getNamesystem(), lastAppliedTxId);
            int logVersion = storage.getLayoutVersion();
            backupInputStream.setBytes(data, logVersion);
            long numTxnsAdvanced = logLoader.loadEditRecords(
                    backupInputStream, true, lastAppliedTxId + 1, null, null);
            if (numTxnsAdvanced != numTxns) {
                throw new IOException("Batch of txns starting at txnid " +
                        firstTxId + " was supposed to contain " + numTxns +
                        " transactions, but we were only able to advance by " +
                        numTxnsAdvanced);
            }
            lastAppliedTxId = logLoader.getLastAppliedTxId();
            getNamesystem().writeLock();
            try {
                getNamesystem().dir.updateCountForQuota();
            } finally {
                getNamesystem().writeUnlock();
            }
        } finally {
            // always reset the reusable stream for the next batch
            backupInputStream.clear();
        }
    }

    /**
     * Transition the BackupNode from JOURNAL_ONLY state to IN_SYNC state.
     * This is done by repeated invocations of tryConvergeJournalSpool until
     * we are caught up to the latest in-progress edits file.
     */
    void convergeJournalSpool() throws IOException {
        Preconditions.checkState(bnState == BNState.JOURNAL_ONLY,
                "bad state: %s", bnState);
        while (!tryConvergeJournalSpool()) {
            ;
        }
        assert bnState == BNState.IN_SYNC;
    }

    /**
     * Attempt a single convergence pass: replay all finalized local edit
     * segments (without holding the lock), then, under the lock, replay the
     * in-progress segment and switch to IN_SYNC.
     *
     * @return true if the node converged; false if the logs rolled mid-pass
     *         and the caller should retry.
     */
    private boolean tryConvergeJournalSpool() throws IOException {
        Preconditions.checkState(bnState == BNState.JOURNAL_ONLY,
                "bad state: %s", bnState);
        // This section is unsynchronized so we can continue to apply
        // ahead of where we're reading, concurrently. Since the state
        // is JOURNAL_ONLY at this point, we know that lastAppliedTxId
        // doesn't change, and curSegmentTxId only increases
        while (lastAppliedTxId < editLog.getCurSegmentTxId() - 1) {
            long target = editLog.getCurSegmentTxId();
            LOG.info("Loading edits into backupnode to try to catch up from txid "
                    + lastAppliedTxId + " to " + target);
            FSImageTransactionalStorageInspector inspector =
                    new FSImageTransactionalStorageInspector();
            storage.inspectStorageDirs(inspector);
            editLog.recoverUnclosedStreams();
            Iterable<EditLogInputStream> editStreamsAll
                    = editLog.selectInputStreams(lastAppliedTxId, target - 1);
            // remove inprogress
            List<EditLogInputStream> editStreams = Lists.newArrayList();
            for (EditLogInputStream s : editStreamsAll) {
                if (s.getFirstTxId() != editLog.getCurSegmentTxId()) {
                    editStreams.add(s);
                }
            }
            loadEdits(editStreams, getNamesystem());
        }
        // now, need to load the in-progress file
        synchronized (this) {
            if (lastAppliedTxId != editLog.getCurSegmentTxId() - 1) {
                LOG.debug("Logs rolled while catching up to current segment");
                return false; // drop lock and try again to load local logs
            }
            EditLogInputStream stream = null;
            Collection<EditLogInputStream> editStreams
                    = getEditLog().selectInputStreams(
                            getEditLog().getCurSegmentTxId(),
                            getEditLog().getCurSegmentTxId());
            for (EditLogInputStream s : editStreams) {
                if (s.getFirstTxId() == getEditLog().getCurSegmentTxId()) {
                    stream = s;
                }
                // NOTE(review): this break executes on the first iteration
                // regardless of the condition above, so only the first
                // stream is ever considered -- confirm whether it was meant
                // to be inside the if-block.
                break;
            }
            if (stream == null) {
                LOG.warn("Unable to find stream starting with " + editLog.getCurSegmentTxId()
                        + ". This indicates that there is an error in synchronization in BackupImage");
                return false;
            }
            try {
                long remainingTxns = getEditLog().getLastWrittenTxId() - lastAppliedTxId;
                LOG.info("Going to finish converging with remaining " + remainingTxns
                        + " txns from in-progress stream " + stream);
                FSEditLogLoader loader =
                        new FSEditLogLoader(getNamesystem(), lastAppliedTxId);
                loader.loadFSEdits(stream, lastAppliedTxId + 1);
                lastAppliedTxId = loader.getLastAppliedTxId();
                assert lastAppliedTxId == getEditLog().getLastWrittenTxId();
            } finally {
                FSEditLog.closeAllStreams(editStreams);
            }
            LOG.info("Successfully synced BackupNode with NameNode at txnid " +
                    lastAppliedTxId);
            setState(BNState.IN_SYNC);
        }
        return true;
    }

    /**
     * Transition edit log to a new state, logging as necessary.
     */
    private synchronized void setState(BNState newState) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("State transition " + bnState + " -> " + newState);
        }
        bnState = newState;
    }

    /**
     * Receive a notification that the NameNode has begun a new edit log.
     * This causes the BN to also start the new edit log in its local
     * directories.
     */
    synchronized void namenodeStartedLogSegment(long txid) throws IOException {
        editLog.startLogSegment(txid, true, namesystem.getEffectiveLayoutVersion());
        if (bnState == BNState.DROP_UNTIL_NEXT_ROLL) {
            // first roll after startup: begin journaling locally
            setState(BNState.JOURNAL_ONLY);
        }
        if (stopApplyingEditsOnNextRoll) {
            if (bnState == BNState.IN_SYNC) {
                LOG.info("Stopped applying edits to prepare for checkpoint.");
                setState(BNState.JOURNAL_ONLY);
            }
            stopApplyingEditsOnNextRoll = false;
            // wake up any thread blocked in waitUntilNamespaceFrozen()
            notifyAll();
        }
    }

    /**
     * Request that the next time the BN receives a log roll, it should
     * stop applying the edits log to the local namespace. This is
     * typically followed on by a call to {@link #waitUntilNamespaceFrozen()}
     */
    synchronized void freezeNamespaceAtNextRoll() {
        stopApplyingEditsOnNextRoll = true;
    }

    /**
     * After {@link #freezeNamespaceAtNextRoll()} has been called, wait until
     * the BN receives notification of the next log roll.
     */
    synchronized void waitUntilNamespaceFrozen() throws IOException {
        if (bnState != BNState.IN_SYNC) return;
        LOG.info("Waiting until the NameNode rolls its edit logs in order " +
                "to freeze the BackupNode namespace.");
        while (bnState == BNState.IN_SYNC) {
            Preconditions.checkState(stopApplyingEditsOnNextRoll,
                    "If still in sync, we should still have the flag set to " +
                    "freeze at next roll");
            try {
                wait();
            } catch (InterruptedException ie) {
                LOG.warn("Interrupted waiting for namespace to freeze", ie);
                throw new IOException(ie);
            }
        }
        LOG.info("BackupNode namespace frozen.");
    }

    /**
     * Override close() so that we don't finalize edit logs.
     */
    @Override
    public synchronized void close() throws IOException {
        editLog.abortCurrentLogSegment();
        storage.close();
    }
}
| |
package ui;
import entity_utils.ProjectUtils;
import entity_utils.TaskUtils;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.control.*;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.control.cell.TextFieldTableCell;
import javafx.scene.layout.HBox;
import logic.MaximumProfitStrategy;
import logic.StrategyContext;
import models.Project;
import models.SystemData;
import models.Task;
import servers.LocalServer;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
/**
 * Singleton GUI page listing projects. Supports a read-only "view" mode
 * (single selection driving a detail card) and an "allocation" mode
 * (multi-selection plus allocate/deallocate/profit-recommendation buttons).
 */
public class ProjectsPage extends AbstractPage implements ChangeListener, EventHandler<ActionEvent> {
    /** Backing list for the table, seeded from the system-wide project map. */
    final ObservableList<Project> data = FXCollections.observableArrayList(SystemData.getAllProjectsMap().values());
    /** Projects currently selected while in allocation mode. */
    private List<Project> selectedProjects = new LinkedList<>();
    private Button maxProfitRecButton, allocateButton, deAllocateButton;
    private TableColumn name, price, startTime, endTime;
    /** Listener used in allocation (multi-select) mode. */
    private ListChangeListener<Project> multipleSelectionListener;
    /** Listener used in view (single-select) mode to refresh the detail card. */
    ChangeListener changeListener;

    private static ProjectsPage ourInstance = new ProjectsPage();

    /** @return the singleton instance of this page. */
    public static ProjectsPage getInstance() {
        return ourInstance;
    }

    private ProjectsPage() {
        super();
        // Radio buttons toggling between browsing and allocation.
        final ToggleGroup group = new ToggleGroup();
        RadioButton viewModeButton = new RadioButton("View Mode");
        viewModeButton.setToggleGroup(group);
        viewModeButton.setSelected(true);
        viewModeButton.setUserData(ProjectsPage.Mode.View);
        RadioButton allocateModeButton = new RadioButton("Allocation Mode");
        allocateModeButton.setToggleGroup(group);
        allocateModeButton.setUserData(ProjectsPage.Mode.Allocate);
        group.selectedToggleProperty().addListener(new ChangeListener<Toggle>() {
            public void changed(ObservableValue<? extends Toggle> ov,
                                Toggle old_toggle, Toggle new_toggle) {
                Toggle toggle = group.getSelectedToggle();
                if (toggle != null) {
                    if (toggle.getUserData().equals(ProjectsPage.Mode.Allocate)) {
                        // Allocation mode: multi-select plus the allocation buttons.
                        table.getSelectionModel().setSelectionMode(SelectionMode.MULTIPLE);
                        table.getSelectionModel().getSelectedItems().addListener(multipleSelectionListener);
                        table.getSelectionModel().selectedItemProperty().removeListener(changeListener);
                        allocateButton.setVisible(true);
                        deAllocateButton.setVisible(true);
                        maxProfitRecButton.setVisible(true);
                    } else if (toggle.getUserData().equals(ProjectsPage.Mode.View)) {
                        // View mode: single-select driving the detail card.
                        table.getSelectionModel().setSelectionMode(SelectionMode.SINGLE);
                        table.getSelectionModel().getSelectedItems().removeListener(multipleSelectionListener);
                        table.getSelectionModel().selectedItemProperty().addListener(changeListener);
                        table.getSelectionModel().clearAndSelect(0);
                        allocateButton.setVisible(false);
                        deAllocateButton.setVisible(false);
                        maxProfitRecButton.setVisible(false);
                    }
                }
            }
        });
        top.getChildren().add(viewModeButton);
        top.getChildren().add(allocateModeButton);
        constructAllocationMode();
        // Inputs for creating a new project.
        final TextField addName = new TextField();
        addName.setPromptText("Name");
        addName.setMaxWidth(100);
        final TextField addPrice = new TextField();
        addPrice.setPrefWidth(100);
        addPrice.setPromptText("Price");
        addButton.setTooltip(new Tooltip("Add project"));
        addButton.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent e) {
                try {
                    Integer price = null;
                    if (!addPrice.getText().equals("")) {
                        price = Integer.parseInt(addPrice.getText());
                    }
                    if (price != null && price < 0) {
                        // Fixed message: it previously referred to employee salary
                        // (copy-paste from EmployeesPage).
                        MainUI.alertError("Invalid input", "Project price cannot be of a negative value.");
                        return;
                    }
                    if (addName.getText().equals("")) {
                        MainUI.alertError("Invalid input", "A project must have a name.");
                        return;
                    }
                    if (price == null) {
                        // NOTE(review): the catch-block below promises a blank price
                        // is acceptable, but Project's constructor requires one.
                        // Preserve the original behavior (reject via the same alert)
                        // until Project supports a missing price.
                        throw new NumberFormatException("price is required");
                    }
                    // Was: re-parsing addPrice here (and a stray ';;'); reuse the
                    // already-validated value instead.
                    Project newProject = new Project(addName.getText(), price);
                    ProjectUtils.createEntity(Project.class, newProject);
                    Integer id = newProject.getId();
                    if (id != null) {
                        // Re-fetch so the local copy reflects server-side defaults.
                        newProject = ProjectUtils.getProject(newProject.getId());
                        SystemData.getAllProjectsMap().put(id, newProject);
                        data.add(newProject);
                        addName.clear();
                        addPrice.clear();
                        table.refresh();
                    }
                } catch (NumberFormatException | ClassCastException exc) {
                    MainUI.alertError("Invalid input", "Please enter only numbers to the Price field or leave it blank.");
                }
            }
        });
        bottom.getChildren().addAll(addName, addPrice, addButton);
        setCenter(addTable("Projects"));
    }

    /** Build the allocation-mode buttons; they start hidden (view mode is the default). */
    private void constructAllocationMode() {
        maxProfitRecButton = new Button("PROFIT REC");
        maxProfitRecButton.getStyleClass().add("simple");
        maxProfitRecButton.setOnAction(this);
        allocateButton = new Button("ALLOCATE");
        allocateButton.getStyleClass().add("simple");
        allocateButton.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                // Commit recommended assignees and promote estimated figures to
                // actual cost/profit for each selected project.
                selectedProjects.forEach(project -> {
                    try {
                        project.getTasks().forEach(t -> {
                            Task task = SystemData.getAllTasksMap().get(t.getId());
                            if (task.getEmployee() == null && task.getRecommendedAssigneeName() != null) {
                                task.setEmployee(task.getRecommendedAssignee());
                                task.setRecommendedAssignee(null);
                                TaskUtils.updateEntity(task);
                            }
                        });
                        project.setCost(project.getEstimatedCost());
                        project.setProfit(project.getEstimatedProfit());
                        project.setEstimatedCost(null);
                        project.setEstimatedProfit(null);
                        ProjectUtils.updateEntity(project);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                });
                table.refresh();
            }
        });
        deAllocateButton = new Button("DEALLOCATE");
        deAllocateButton.getStyleClass().add("simple");
        deAllocateButton.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                // Clear every task assignment and the realized cost/profit of
                // each selected project.
                selectedProjects.forEach(project -> {
                    try {
                        project.getTasks().forEach(t -> {
                            Task task = SystemData.getAllTasksMap().get(t.getId());
                            if (task.getEmployee() != null) {
                                task.setEmployee(null);
                                TaskUtils.updateEntity(task);
                            }
                        });
                        project.setCost(null);
                        project.setProfit(null);
                        ProjectUtils.updateEntity(project);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                });
                table.refresh();
            }
        });
        allocateButton.setVisible(false);
        deAllocateButton.setVisible(false);
        maxProfitRecButton.setVisible(false);
        top.getChildren().add(maxProfitRecButton);
        top.getChildren().add(allocateButton);
        top.getChildren().add(deAllocateButton);
    }

    @Override
    TableView addTable(String pageName) {
        table = super.addTable(pageName);
        TableColumn id = new TableColumn("ID");
        name = new TableColumn("Name");
        price = new TableColumn("Price");
        startTime = new TableColumn("Start time");
        endTime = new TableColumn("End Time");
        // NOTE(review): cost/profit columns are configured below but never
        // added to the table; only the estimated columns are displayed.
        TableColumn cost = new TableColumn("Cost");
        TableColumn profit = new TableColumn("Profit");
        TableColumn estimatedCost = new TableColumn("Estimated Cost");
        TableColumn estimatedProfit = new TableColumn("Estimated Profit");
        id.setMinWidth(40);
        id.setCellValueFactory(
                new PropertyValueFactory<Task, String>("id"));
        name.setMinWidth(60);
        name.setCellValueFactory(
                new PropertyValueFactory<Task, String>("name"));
        startTime.setMinWidth(100);
        startTime.setCellValueFactory(
                new PropertyValueFactory<Task, String>("startTime"));
        endTime.setMinWidth(100);
        endTime.setCellValueFactory(
                new PropertyValueFactory<Task, String>("endTime"));
        price.setMinWidth(60);
        price.setCellValueFactory(
                new PropertyValueFactory<Task, String>("price"));
        cost.setMinWidth(60);
        cost.setCellValueFactory(
                new PropertyValueFactory<Task, String>("cost"));
        profit.setMinWidth(60);
        profit.setCellValueFactory(
                new PropertyValueFactory<Task, String>("profit"));
        estimatedCost.setMinWidth(60);
        estimatedCost.setCellValueFactory(
                new PropertyValueFactory<Task, String>("estimatedCost"));
        estimatedProfit.setMinWidth(60);
        estimatedProfit.setCellValueFactory(
                new PropertyValueFactory<Task, String>("estimatedProfit"));
        table.getColumns().addAll(id, name, startTime, endTime, price, estimatedCost, estimatedProfit);
        table.setItems(data);
        setEditableCells();
        // View-mode listener: populate the detail card for the selection.
        changeListener = new ChangeListener() {
            @Override
            public void changed(ObservableValue obs, Object oldSelection, Object newSelection) {
                Project project = (Project) newSelection;
                if (project != null) {
                    try {
                        String tasks = "---";
                        if (project.getTasks() != null) {
                            tasks = "";
                            for (Task t : project.getTasks()) {
                                Task task = SystemData.getAllTasksMap().get(t.getId());
                                // Fixed formatting: the ID now appears inside the
                                // parentheses (was "(ID=)" followed by the id).
                                tasks += task.getName() + "(ID=" + task.getId() + ")"
                                        + "\n\t recommended assignee - " + task.getRecommendedAssigneeName()
                                        + "\n\t assigned employee - " + task.getEmployeeName() + "\n";
                            }
                        }
                        cardValues = new String[]{
                                project.getId().toString(),
                                project.getName(),
                                project.getDescription(),
                                project.getStartTime() == null ? "" : project.getStartTime().toString(),
                                project.getEndTime() == null ? "" : project.getEndTime().toString(),
                                project.getCost() == null ? "" : project.getCost().toString(),
                                project.getProfit() == null ? "" : project.getProfit().toString(),
                                tasks,
                        };
                        setNewCard(cardValues, (Project) newSelection);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
        };
        table.getSelectionModel().selectedItemProperty().addListener(changeListener);
        // Allocation-mode listener: mirror the table's multi-selection.
        multipleSelectionListener = new ListChangeListener<Project>() {
            @Override
            public void onChanged(Change<? extends Project> c) {
                if (!selectedProjects.isEmpty()) {
                    selectedProjects.clear();
                }
                selectedProjects.addAll(c.getList());
            }
        };
        deleteEntryButton.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent event) {
                ObservableList selectedCells = table.getSelectionModel().getSelectedCells();
                TablePosition tablePosition = (TablePosition) selectedCells.get(0);
                Project toRemove = ((Project) table.getItems().get(tablePosition.getRow()));
                ProjectUtils.deleteEntity(Project.class, toRemove);
                // Only drop the local copy once the server confirms deletion.
                if (ProjectUtils.getProject(toRemove.getId()) == null) {
                    SystemData.getAllProjectsMap().remove(toRemove.getId());
                    data.remove(toRemove);
                    table.refresh();
                } else {
                    MainUI.alertError("Cannot delete", "There might be some problem connecting to the database.");
                }
            }
        });
        return table;
    }

    /** Make the name and price columns editable in place. */
    private void setEditableCells() {
        name.setCellFactory(TextFieldTableCell.forTableColumn());
        name.setOnEditCommit(
                new EventHandler<TableColumn.CellEditEvent<Project, String>>() {
                    @Override
                    public void handle(TableColumn.CellEditEvent<Project, String> t) {
                        Project project = (Project) t.getTableView().getItems().get(t.getTablePosition().getRow());
                        project.setName(t.getNewValue());
                    }
                }
        );
        //The following snippet of code is based on http://stackoverflow.com/a/34701925
        price.setCellFactory(col -> new TextFieldTableCell<Project, Integer>(new EmployeesPage.EditIntegerStringConverter()) {
            @Override
            public void updateItem(Integer item, boolean empty) {
                if (empty) {
                    super.updateItem(item, empty);
                } else {
                    // if out of range, revert to previous value:
                    if (item != null && item.intValue() < 0) {
                        item = getItem();
                        MainUI.alertError("Invalid input", "Please enter a positive number.");
                    }
                    super.updateItem(item, empty);
                }
            }
        });
        price.setOnEditCommit(
                new EventHandler<TableColumn.CellEditEvent<Project, Integer>>() {
                    @Override
                    public void handle(TableColumn.CellEditEvent<Project, Integer> t) {
                        Integer price = t.getNewValue();
                        // Ignore non-positive edits; keep the previous value.
                        if (price != null && price <= 0) {
                        } else {
                            Project project = (Project) t.getTableView().getItems().get(t.getTablePosition().getRow());
                            project.setPrice(price);
                        }
                    }
                }
        );
    }

    /** Build the empty detail card shown before any project is selected. */
    HBox addCard() {
        String[] names = {"ID", "Name", "Description", "Start date", "End Date", "Cost", "Profit", "Tasks"};
        // Fixed: the value array previously had 6 entries for 8 field names.
        cardValues = new String[]{"", "", "", "", "", "", "", ""};
        return super.addCard(names, cardValues);
    }

    @Override
    public void changed(ObservableValue observable, Object oldValue, Object newValue) {
        if (newValue != null) {
            ObservableList selectedCells = table.getSelectionModel().getSelectedCells();
            TablePosition tablePosition = (TablePosition) selectedCells.get(0);
            // NOTE(review): the cell value is computed but never used --
            // confirm whether this listener is still needed.
            Object val = tablePosition.getTableColumn().getCellData(newValue);
        }
    }

    /** Handler for the PROFIT REC button: run the max-profit allocation strategy. */
    @Override
    public void handle(ActionEvent event) {
        if (selectedProjects == null || selectedProjects.isEmpty()) {
            MainUI.alertError("Invalid selection.", "Please select projects to allocate.");
            return;
        }
        LocalServer.iLogger.info("MAX_PROFIT");
        new StrategyContext(MaximumProfitStrategy.getInstance(), selectedProjects);
        table.refresh();
        MainUI.alertInformation("Allocation result", "Total number of unallocated projects: " + StrategyContext.getNumOfUnallocatedProjects()
                + ". \nAmong them number of projects invalid for allocation: " + StrategyContext.getNumOfProjectsInvalidForAllocation()
                + ". \nTotal profit from the selected projects is equal to: " + StrategyContext.getTotalProfitFromSelectedProjects());
    }

    /** Page interaction modes: read-only browsing vs. allocation. */
    enum Mode {
        View,
        Allocate;
    }
}
| |
package org.mtransit.parser.ca_guelph_transit_bus;
import static org.mtransit.commons.RegexUtils.DIGITS;
import static org.mtransit.commons.StringUtils.EMPTY;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.mtransit.commons.CharUtils;
import org.mtransit.commons.CleanUtils;
import org.mtransit.commons.StringUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.MTLog;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.mt.data.MAgency;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
// http://data.open.guelph.ca/
// http://data.open.guelph.ca/dataset/guelph-transit-gtfs-data
// http://data.open.guelph.ca/datafiles/guelph-transit/guelph_transit_gtfs.zip
// OTHER: http://guelph.ca/uploads/google/google_transit.zip
public class GuelphTransitBusAgencyTools extends DefaultAgencyTools {
public static void main(@NotNull String[] args) {
new GuelphTransitBusAgencyTools().start(args);
}
@Nullable
@Override
public List<Locale> getSupportedLanguages() {
return LANG_EN;
}
@Override
public boolean defaultExcludeEnabled() {
return true;
}
@NotNull
@Override
public String getAgencyName() {
return "Guelph Transit";
}
@NotNull
@Override
public Integer getAgencyRouteType() {
return MAgency.ROUTE_TYPE_BUS;
}
private static final String COMMUNITY_BUS_RSN = "Com";
private static final long COMMUNITY_BUS_RID = 9_998L;
@Override
public boolean defaultRouteIdEnabled() {
return true;
}
@Override
public boolean useRouteShortNameForRouteId() {
return true;
}
@Nullable
@Override
public Long convertRouteIdFromShortNameNotSupported(@NotNull String routeShortName) {
if (COMMUNITY_BUS_RSN.equals(routeShortName)) {
return COMMUNITY_BUS_RID;
}
if ("GorEdi".equals(routeShortName)) {
return 10_001L;
}
if ("OD Sout".equals(routeShortName)
|| "ODSout".equals(routeShortName)) {
return 10_002L;
}
if ("VicClr".equals(routeShortName)) {
return 10_003L;
}
if ("WHanSco".equals(routeShortName)) {
return 10_004L;
}
return super.convertRouteIdFromShortNameNotSupported(routeShortName);
}
private static final Pattern ALL_WHITESPACES = Pattern.compile("\\s+", Pattern.CASE_INSENSITIVE);
@NotNull
@Override
public String cleanRouteShortName(@NotNull String routeShortName) {
routeShortName = ALL_WHITESPACES.matcher(routeShortName).replaceAll(EMPTY);
return routeShortName;
}
@Override
public boolean defaultRouteLongNameEnabled() {
return true;
}
private static final Pattern STARTS_WITH_ROUTE_RSN = Pattern.compile("(route[\\d]*[A-Z]*[\\-]?[\\s]*)", Pattern.CASE_INSENSITIVE);
@NotNull
@Override
public String cleanRouteLongName(@NotNull String routeLongName) {
routeLongName = STARTS_WITH_ROUTE_RSN.matcher(routeLongName).replaceAll(EMPTY);
if (StringUtils.isEmpty(routeLongName)) {
throw new MTLog.Fatal("getRouteLongName() > Unexpected route long name name '%s'!", routeLongName);
}
return CleanUtils.cleanLabel(routeLongName);
}
@Override
public boolean defaultAgencyColorEnabled() {
return true;
}
private static final String AGENCY_COLOR = "00A6E5"; // BLUE
@NotNull
@Override
public String getAgencyColor() {
return AGENCY_COLOR;
}
@SuppressWarnings("DuplicateBranchesInSwitch")
@Nullable
@Override
public String provideMissingRouteColor(@NotNull GRoute gRoute) {
final String rsnS = gRoute.getRouteShortName();
if (COMMUNITY_BUS_RSN.equals(rsnS)) {
return "D14625";
}
if (rsnS.startsWith("Zone ") //
|| rsnS.startsWith("NYE ")) {
return "ED1C24";
}
final Matcher matcher = DIGITS.matcher(rsnS);
if (matcher.find()) {
final int rsn = Integer.parseInt(matcher.group());
switch (rsn) {
// @formatter:off
case 1: return "EC008C";
case 2: return "EC008C";
case 3: return "91469B";
case 4: return "1988B7";
case 5: return "921B1E";
case 6: return "ED1C24";
case 7: return "682C91";
case 8: return "0082B1";
case 9: return "5C7AAE";
case 10: return "A54686";
case 11: return "5C7AAE";
case 12: return "008290";
case 13: return "811167";
case 14: return "485E88";
case 15: return "8F7140";
case 16: return "29712A";
case 17: return "CB640A";
case 18: return "CB640A";
case 20: return "556940";
case 40: return "005689";
case 41: return "405D18";
case 50: return "A54686"; // 50 U
case 51: return "405D18"; // 51 U
case 52: return "485E88"; // 52 U
case 56: return "ED1C24"; // 56 U
case 57: return "5C7AAE"; // 57 U
case 58: return "91469b"; // 58 U
case 99: return "4F832E ";
// @formatter:on
}
}
throw new MTLog.Fatal("getRouteColor() > Unexpected route color for '%s'!", gRoute);
}
@Override
public boolean directionFinderEnabled() {
return true;
}
private static final Pattern STARTS_WITH_RSN = Pattern.compile("(^[\\d]+ )", Pattern.CASE_INSENSITIVE);
private static final Pattern STARTS_W_COMMUNITY_BUS_ = Pattern.compile("(^(community bus|mainline) (?=(.{3,})))", Pattern.CASE_INSENSITIVE);
@NotNull
@Override
public String cleanTripHeadsign(@NotNull String tripHeadsign) {
tripHeadsign = CleanUtils.keepToAndRemoveVia(tripHeadsign);
tripHeadsign = STARTS_WITH_RSN.matcher(tripHeadsign).replaceAll(EMPTY);
tripHeadsign = STARTS_W_COMMUNITY_BUS_.matcher(tripHeadsign).replaceAll(EMPTY);
tripHeadsign = CleanUtils.CLEAN_AT.matcher(tripHeadsign).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
tripHeadsign = CleanUtils.CLEAN_AND.matcher(tripHeadsign).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT);
tripHeadsign = CleanUtils.cleanNumbers(tripHeadsign);
tripHeadsign = CleanUtils.cleanBounds(tripHeadsign);
tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign);
return CleanUtils.cleanLabel(tripHeadsign);
}
// Trailing " arrival" / " depart" suffix some feeds append to terminal stop names.
private static final Pattern CLEAN_DEPART_ARRIVE = Pattern.compile("( (arrival|depart)$)", Pattern.CASE_INSENSITIVE);

/**
 * Normalizes a GTFS stop name for display: resolves "@"-style separators,
 * drops arrival/departure suffixes, then applies the shared bound, street-type
 * and number cleanups. The steps are applied in sequence, so their order matters.
 *
 * @param gStopName raw stop name from the GTFS feed
 * @return cleaned, display-ready stop name label
 */
@NotNull
@Override
public String cleanStopName(@NotNull String gStopName) {
    gStopName = CleanUtils.CLEAN_AT.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
    gStopName = CLEAN_DEPART_ARRIVE.matcher(gStopName).replaceAll(EMPTY);
    gStopName = CleanUtils.cleanBounds(gStopName);
    gStopName = CleanUtils.cleanStreetTypes(gStopName);
    gStopName = CleanUtils.cleanNumbers(gStopName);
    return CleanUtils.cleanLabel(gStopName);
}
private static final String DASH = "-";
private static final String UNDERSCORE = "_";

/**
 * Resolves the numeric stop ID for a GTFS stop.
 * Resolution order: a hard-coded override for one known stop, then the stop
 * code when it is purely numeric, then the digits embedded between the first
 * '-' and the following '_' of the raw stop ID (e.g. "Route5A-0549_...").
 *
 * @param gStop GTFS stop to resolve
 * @return numeric stop ID
 * @throws MTLog.Fatal when no numeric ID can be extracted
 */
@Override
public int getStopId(@NotNull GStop gStop) {
    //noinspection deprecation
    final String rawStopId = gStop.getStopId();
    // Manual override: this stop's raw ID carries a code that must map to 619.
    if (rawStopId.equals("Route5A-0549_Victoria Road South at Macalister Boulevard southbound")) {
        return 619;
    }
    final String stopCode = gStop.getStopCode();
    if (stopCode.length() > 0 && CharUtils.isDigitsOnly(stopCode)) {
        return Integer.parseInt(stopCode);
    }
    final int dashPos = rawStopId.indexOf(DASH);
    final int underscorePos = rawStopId.indexOf(UNDERSCORE, dashPos);
    if (dashPos >= 0 && underscorePos >= 0) {
        // Digits between the first '-' and the following '_' form the stop ID.
        return Integer.parseInt(rawStopId.substring(dashPos + 1, underscorePos));
    }
    throw new MTLog.Fatal("Error while getting stop ID for %s!", gStop);
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESTestCase;
import java.util.Collections;
import java.util.function.Supplier;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.index.IndexSettingsTests.newIndexMeta;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
/**
 * Tests for the {@code index.sort.*} settings: parsing into
 * {@link IndexSortConfig}, rejection of invalid values, and building the
 * actual index sort against mapped fields (runtime fields and field aliases).
 */
public class IndexSortSettingsTests extends ESTestCase {
    // Wraps the given settings into IndexSettings for a test index named "test".
    private static IndexSettings indexSettings(Settings settings) {
        return new IndexSettings(newIndexMeta("test", settings), Settings.EMPTY);
    }

    // No index.sort.* settings -> no index sort configured.
    public void testNoIndexSort() {
        IndexSettings indexSettings = indexSettings(EMPTY_SETTINGS);
        assertFalse(indexSettings.getIndexSortConfig().hasIndexSort());
    }

    // Single sort field with explicit order, multi-value mode and missing-value handling.
    public void testSimpleIndexSort() {
        Settings settings = Settings.builder()
            .put("index.sort.field", "field1")
            .put("index.sort.order", "asc")
            .put("index.sort.mode", "max")
            .put("index.sort.missing", "_last")
            .build();
        IndexSettings indexSettings = indexSettings(settings);
        IndexSortConfig config = indexSettings.getIndexSortConfig();
        assertTrue(config.hasIndexSort());
        assertThat(config.sortSpecs.length, equalTo(1));
        assertThat(config.sortSpecs[0].field, equalTo("field1"));
        assertThat(config.sortSpecs[0].order, equalTo(SortOrder.ASC));
        assertThat(config.sortSpecs[0].missingValue, equalTo("_last"));
        assertThat(config.sortSpecs[0].mode, equalTo(MultiValueMode.MAX));
    }

    // List-valued settings produce one spec per field; mode stays null when unset.
    public void testIndexSortWithArrays() {
        Settings settings = Settings.builder()
            .putList("index.sort.field", "field1", "field2")
            .putList("index.sort.order", "asc", "desc")
            .putList("index.sort.missing", "_last", "_first")
            .build();
        IndexSettings indexSettings = indexSettings(settings);
        IndexSortConfig config = indexSettings.getIndexSortConfig();
        assertTrue(config.hasIndexSort());
        assertThat(config.sortSpecs.length, equalTo(2));
        assertThat(config.sortSpecs[0].field, equalTo("field1"));
        assertThat(config.sortSpecs[1].field, equalTo("field2"));
        assertThat(config.sortSpecs[0].order, equalTo(SortOrder.ASC));
        assertThat(config.sortSpecs[1].order, equalTo(SortOrder.DESC));
        assertThat(config.sortSpecs[0].missingValue, equalTo("_last"));
        assertThat(config.sortSpecs[1].missingValue, equalTo("_first"));
        assertNull(config.sortSpecs[0].mode);
        assertNull(config.sortSpecs[1].mode);
    }

    // One field but two orders (as a single comma string) -> size mismatch error.
    public void testInvalidIndexSort() {
        final Settings settings = Settings.builder()
            .put("index.sort.field", "field1")
            .put("index.sort.order", "asc, desc")
            .build();
        IllegalArgumentException exc =
            expectThrows(IllegalArgumentException.class, () -> indexSettings(settings));
        assertThat(exc.getMessage(), containsString("index.sort.field:[field1] index.sort.order:[asc, desc], size mismatch"));
    }

    // Same mismatch, with the orders supplied as an explicit list setting.
    public void testInvalidIndexSortWithArray() {
        final Settings settings = Settings.builder()
            .put("index.sort.field", "field1")
            .putList("index.sort.order", new String[] {"asc", "desc"})
            .build();
        IllegalArgumentException exc =
            expectThrows(IllegalArgumentException.class, () -> indexSettings(settings));
        assertThat(exc.getMessage(),
            containsString("index.sort.field:[field1] index.sort.order:[asc, desc], size mismatch"));
    }

    // Unknown sort order value is rejected at settings-parse time.
    public void testInvalidOrder() {
        final Settings settings = Settings.builder()
            .put("index.sort.field", "field1")
            .put("index.sort.order", "invalid")
            .build();
        IllegalArgumentException exc =
            expectThrows(IllegalArgumentException.class, () -> indexSettings(settings));
        assertThat(exc.getMessage(), containsString("Illegal sort order:invalid"));
    }

    // Unknown multi-value mode is rejected at settings-parse time.
    public void testInvalidMode() {
        final Settings settings = Settings.builder()
            .put("index.sort.field", "field1")
            .put("index.sort.mode", "invalid")
            .build();
        IllegalArgumentException exc =
            expectThrows(IllegalArgumentException.class, () -> indexSettings(settings));
        assertThat(exc.getMessage(), containsString("Illegal sort mode: invalid"));
    }

    // Missing-value handling only accepts _last / _first.
    public void testInvalidMissing() {
        final Settings settings = Settings.builder()
            .put("index.sort.field", "field1")
            .put("index.sort.missing", "default")
            .build();
        IllegalArgumentException exc =
            expectThrows(IllegalArgumentException.class, () -> indexSettings(settings));
        assertThat(exc.getMessage(), containsString("Illegal missing value:[default]," +
            " must be one of [_last, _first]"));
    }

    // Building the sort against a runtime-like field (fielddataBuilder returns null
    // and valueFetcher throws) must fail with a "docvalues not found" error whose
    // cause explains that runtime fields are not sortable.
    public void testIndexSorting() {
        IndexSettings indexSettings = indexSettings(Settings.builder().put("index.sort.field", "field").build());
        IndexSortConfig config = indexSettings.getIndexSortConfig();
        assertTrue(config.hasIndexSort());
        IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null);
        NoneCircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService();
        final IndexFieldDataService indexFieldDataService = new IndexFieldDataService(indexSettings, cache, circuitBreakerService, null);
        // Minimal anonymous field type standing in for a runtime field.
        MappedFieldType fieldType = new MappedFieldType("field", false, false, false, TextSearchInfo.NONE, Collections.emptyMap()) {
            @Override
            public String typeName() {
                return null;
            }
            @Override
            public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
                searchLookup.get();
                return null;
            }
            @Override
            public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
                throw new UnsupportedOperationException();
            }
            @Override
            public Query termQuery(Object value, SearchExecutionContext context) {
                throw new UnsupportedOperationException();
            }
        };
        IllegalArgumentException iae = expectThrows(
            IllegalArgumentException.class,
            () -> config.buildIndexSort(
                field -> fieldType,
                (ft, searchLookupSupplier) -> indexFieldDataService.getForField(ft, "index", searchLookupSupplier)
            )
        );
        assertEquals("docvalues not found for index sort field:[field]", iae.getMessage());
        assertThat(iae.getCause(), instanceOf(UnsupportedOperationException.class));
        assertEquals("index sorting not supported on runtime field [field]", iae.getCause().getMessage());
    }

    // Sorting on a field alias is rejected on current-version indices.
    public void testSortingAgainstAliases() {
        IndexSettings indexSettings = indexSettings(Settings.builder().put("index.sort.field", "field").build());
        IndexSortConfig config = indexSettings.getIndexSortConfig();
        assertTrue(config.hasIndexSort());
        IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null);
        NoneCircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService();
        final IndexFieldDataService indexFieldDataService = new IndexFieldDataService(indexSettings, cache, circuitBreakerService, null);
        MappedFieldType mft = new KeywordFieldMapper.KeywordFieldType("aliased");
        Exception e = expectThrows(IllegalArgumentException.class, () -> config.buildIndexSort(
            field -> mft,
            (ft, s) -> indexFieldDataService.getForField(ft, "index", s)
        ));
        assertEquals("Cannot use alias [field] as an index sort field", e.getMessage());
    }

    // On pre-7.13 indices, alias sorting is still allowed but emits a deprecation warning.
    public void testSortingAgainstAliasesPre713() {
        IndexSettings indexSettings = indexSettings(Settings.builder()
            .put("index.version.created", Version.V_7_12_0)
            .put("index.sort.field", "field").build());
        IndexSortConfig config = indexSettings.getIndexSortConfig();
        assertTrue(config.hasIndexSort());
        IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null);
        NoneCircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService();
        final IndexFieldDataService indexFieldDataService = new IndexFieldDataService(indexSettings, cache, circuitBreakerService, null);
        MappedFieldType mft = new KeywordFieldMapper.KeywordFieldType("aliased");
        config.buildIndexSort(
            field -> mft,
            (ft, s) -> indexFieldDataService.getForField(ft, "index", s));
        assertWarnings("Index sort for index [test] defined on field [field] which resolves to field [aliased]. " +
            "You will not be able to define an index sort over aliased fields in new indexes");
    }
}
| |
package net.i2p.router.transport.udp;
import java.io.IOException;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.SocketException;
import java.util.concurrent.atomic.AtomicInteger;
import net.i2p.router.RouterContext;
import net.i2p.util.Log;
/**
* Coordinate the low-level datagram socket, creating and managing the UDPSender and
* UDPReceiver.
*/
class UDPEndpoint {
    private final RouterContext _context;
    private final Log _log;
    private int _listenPort;
    private final UDPTransport _transport;
    private UDPSender _sender;
    // Only created in startup() when a transport was supplied; stays null in unit tests.
    private UDPReceiver _receiver;
    private DatagramSocket _socket;
    private final InetAddress _bindAddress;
    private final boolean _isIPv4, _isIPv6;
    // Monotonic suffix so each sender/receiver thread pair gets a unique name.
    private static final AtomicInteger _counter = new AtomicInteger();

    /**
     * @param transport may be null for unit testing ONLY
     * @param listenPort -1 or the requested port, may not be honored
     * @param bindAddress null ok
     */
    public UDPEndpoint(RouterContext ctx, UDPTransport transport, int listenPort, InetAddress bindAddress) {
        _context = ctx;
        _log = ctx.logManager().getLog(UDPEndpoint.class);
        _transport = transport;
        _bindAddress = bindAddress;
        _listenPort = listenPort;
        // A null bind address is the wildcard, which covers both families.
        _isIPv4 = bindAddress == null || bindAddress instanceof Inet4Address;
        _isIPv6 = bindAddress == null || bindAddress instanceof Inet6Address;
    }

    /** caller should call getListenPort() after this to get the actual bound port and determine success */
    public synchronized void startup() throws SocketException {
        if (_log.shouldLog(Log.DEBUG))
            _log.debug("Starting up the UDP endpoint");
        // Tear down any previous socket/threads before rebinding.
        shutdown();
        _socket = getSocket();
        if (_socket == null) {
            _log.log(Log.CRIT, "UDP Unable to open a port");
            throw new SocketException("SSU Unable to bind to a port on " + _bindAddress);
        }
        int count = _counter.incrementAndGet();
        _sender = new UDPSender(_context, _socket, "UDPSender " + count);
        _sender.startup();
        if (_transport != null) {
            _receiver = new UDPReceiver(_context, _transport, _socket, "UDPReceiver " + count);
            _receiver.startup();
        }
    }

    /** Stops the sender/receiver threads (if running) and closes the socket. */
    public synchronized void shutdown() {
        if (_sender != null) {
            _sender.shutdown();
        }
        // FIX: _receiver must be null-checked separately. It is only created when a
        // transport was supplied (see startup()), so the previous code, which called
        // _receiver.shutdown() whenever _sender was non-null, threw an NPE in the
        // transport-less (unit test) case — including from startup()'s own
        // shutdown() call on a restart.
        if (_receiver != null) {
            _receiver.shutdown();
        }
        if (_socket != null) {
            _socket.close();
        }
    }

    public void setListenPort(int newPort) { _listenPort = newPort; }

    /*******
    public void updateListenPort(int newPort) {
        if (newPort == _listenPort) return;
        try {
            if (_bindAddress == null)
                _socket = new DatagramSocket(_listenPort);
            else
                _socket = new DatagramSocket(_listenPort, _bindAddress);
            _sender.updateListeningPort(_socket, newPort);
            // note: this closes the old socket, so call this after the sender!
            _receiver.updateListeningPort(_socket, newPort);
            _listenPort = newPort;
        } catch (SocketException se) {
            if (_log.shouldLog(Log.ERROR))
                _log.error("Unable to bind on " + _listenPort);
        }
    }
    ********/

    /** 8998 is monotone, and 31000 is the wrapper outbound, so let's stay between those */
    public static final String PROP_MIN_PORT = "i2np.udp.minPort";
    public static final String PROP_MAX_PORT = "i2np.udp.maxPort";
    private static final int MIN_RANDOM_PORT = 9111;
    private static final int MAX_RANDOM_PORT = 30777;
    private static final int MAX_PORT_RETRIES = 20;

    /**
     * Open socket using requested port in _listenPort and bind host in _bindAddress.
     * If _listenPort <= 0, or requested port is busy, repeatedly try a new random port.
     * @return null on failure
     * Sets _listenPort to actual port or -1 on failure
     */
    private DatagramSocket getSocket() {
        DatagramSocket socket = null;
        int port = _listenPort;
        if (port > 0 && port < 1024)
            _log.logAlways(Log.WARN, "Specified UDP port is " + port + ", ports lower than 1024 not recommended");
        for (int i = 0; i < MAX_PORT_RETRIES; i++) {
            if (port <= 0) {
                // try random ports rather than just do new DatagramSocket()
                // so we stay out of the way of other I2P stuff
                port = selectRandomPort(_context);
            }
            try {
                if (_bindAddress == null)
                    socket = new DatagramSocket(port);
                else
                    socket = new DatagramSocket(port, _bindAddress);
                break;
            } catch (SocketException se) {
                if (_log.shouldLog(Log.WARN))
                    _log.warn("Binding to port " + port + " failed", se);
            }
            // Force a fresh random port on the next retry.
            port = -1;
        }
        if (socket == null) {
            _log.log(Log.CRIT, "SSU Unable to bind to a port on " + _bindAddress);
        } else if (port != _listenPort) {
            if (_listenPort > 0)
                _log.error("SSU Unable to bind to requested port " + _listenPort + ", using random port " + port);
            else
                _log.logAlways(Log.INFO, "UDP selected random port " + port);
        }
        _listenPort = port;
        return socket;
    }

    /**
     * Pick a random port between the configured boundaries
     * @since IPv6
     */
    public static int selectRandomPort(RouterContext ctx) {
        int minPort = Math.min(65535, Math.max(1, ctx.getProperty(PROP_MIN_PORT, MIN_RANDOM_PORT)));
        int maxPort = Math.min(65535, Math.max(minPort, ctx.getProperty(PROP_MAX_PORT, MAX_RANDOM_PORT)));
        return minPort + ctx.random().nextInt(1 + maxPort - minPort);
    }

    /** call after startup() to get actual port or -1 on startup failure */
    public int getListenPort() { return _listenPort; }

    public UDPSender getSender() { return _sender; }

    /**
     * Add the packet to the outobund queue to be sent ASAP (as allowed by
     * the bandwidth limiter)
     * BLOCKING if queue is full.
     */
    public void send(UDPPacket packet) {
        _sender.add(packet);
    }

    /**
     * Blocking call to receive the next inbound UDP packet from any peer.
     *
     * UNIT TESTING ONLY. Direct from the socket.
     * In normal operation, UDPReceiver thread injects to PacketHandler queue.
     *
     * @return null if we have shut down, or on failure
     */
    public UDPPacket receive() {
        UDPPacket packet = UDPPacket.acquire(_context, true);
        try {
            _socket.receive(packet.getPacket());
            return packet;
        } catch (IOException ioe) {
            // Socket closed or I/O error: return the packet to the pool.
            packet.release();
            return null;
        }
    }

    /**
     * Clear outbound queue, probably in preparation for sending destroy() to everybody.
     * @since 0.9.2
     */
    public void clearOutbound() {
        if (_sender != null)
            _sender.clear();
    }

    /**
     * @return true for wildcard too
     * @since IPv6
     */
    public boolean isIPv4() {
        return _isIPv4;
    }

    /**
     * @return true for wildcard too
     * @since IPv6
     */
    public boolean isIPv6() {
        return _isIPv6;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.integration.mqtt.imported;
import javax.net.ssl.KeyManager;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.File;
import java.io.IOException;
import java.security.ProtectionDomain;
import java.security.SecureRandom;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import io.netty.handler.codec.mqtt.MqttMessage;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.activemq.artemis.core.protocol.mqtt.MQTTInterceptor;
import org.apache.activemq.artemis.core.remoting.impl.netty.TransportConstants;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.tests.util.ActiveMQTestBase;
import org.fusesource.mqtt.client.MQTT;
import org.fusesource.mqtt.client.Tracer;
import org.fusesource.mqtt.codec.MQTTFrame;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.Collections.singletonList;
/**
 * Base class for MQTT protocol integration tests. Starts an embedded ActiveMQ
 * Artemis broker with a CORE acceptor (port 5445) and an MQTT acceptor
 * (default port 1883), and provides helpers for creating plain and SSL MQTT
 * client connections.
 */
public class MQTTTestSupport extends ActiveMQTestBase {

    private ActiveMQServer server;
    private static final Logger LOG = LoggerFactory.getLogger(MQTTTestSupport.class);
    protected int port = 1883;
    protected ActiveMQConnectionFactory cf;
    protected LinkedList<Throwable> exceptions = new LinkedList<>();
    protected boolean persistent;
    protected String protocolConfig;
    protected String protocolScheme;
    protected boolean useSSL;

    // MQTT QoS levels.
    public static final int AT_MOST_ONCE = 0;
    public static final int AT_LEAST_ONCE = 1;
    public static final int EXACTLY_ONCE = 2;

    @Rule
    public TestName name = new TestName();

    public MQTTTestSupport() {
        this.protocolScheme = "mqtt";
        this.useSSL = false;
        cf = new ActiveMQConnectionFactory(false, new TransportConfiguration(ActiveMQTestBase.NETTY_CONNECTOR_FACTORY));
    }

    /** @return the project base directory, resolved from this class's code source. */
    public File basedir() throws IOException {
        ProtectionDomain protectionDomain = getClass().getProtectionDomain();
        return new File(new File(protectionDomain.getCodeSource().getLocation().getPath()), "../..").getCanonicalFile();
    }

    @Override
    public String getName() {
        return name.getMethodName();
    }

    public ActiveMQServer getServer() {
        return server;
    }

    /** Installs the test key/trust stores as system properties, then starts the broker. */
    @Override
    @Before
    public void setUp() throws Exception {
        String basedir = basedir().getPath();
        System.setProperty("javax.net.ssl.trustStore", basedir + "/src/test/resources/client.keystore");
        System.setProperty("javax.net.ssl.trustStorePassword", "password");
        System.setProperty("javax.net.ssl.trustStoreType", "jks");
        System.setProperty("javax.net.ssl.keyStore", basedir + "/src/test/resources/server.keystore");
        System.setProperty("javax.net.ssl.keyStorePassword", "password");
        System.setProperty("javax.net.ssl.keyStoreType", "jks");
        exceptions.clear();
        startBroker();
    }

    /** Clears the SSL system properties and stops the broker. */
    @Override
    @After
    public void tearDown() throws Exception {
        System.clearProperty("javax.net.ssl.trustStore");
        System.clearProperty("javax.net.ssl.trustStorePassword");
        System.clearProperty("javax.net.ssl.trustStoreType");
        System.clearProperty("javax.net.ssl.keyStore");
        System.clearProperty("javax.net.ssl.keyStorePassword");
        System.clearProperty("javax.net.ssl.keyStoreType");
        stopBroker();
        super.tearDown();
    }

    /** Creates, configures and starts the embedded broker with CORE and MQTT acceptors. */
    public void startBroker() throws Exception {
        // TODO Add SSL
        super.setUp();
        server = createServerForMQTT();
        addCoreConnector();
        addMQTTConnector();
        AddressSettings addressSettings = new AddressSettings();
        addressSettings.setMaxSizeBytes(999999999);
        addressSettings.setAutoCreateJmsQueues(true);
        server.getAddressSettingsRepository().addMatch("#", addressSettings);
        server.start();
        server.waitForActivation(10, TimeUnit.SECONDS);
    }

    // Broker configured with the message-counting interceptors and DLA/EXPIRY addresses.
    private ActiveMQServer createServerForMQTT() throws Exception {
        Configuration defaultConfig = createDefaultConfig(true).setIncomingInterceptorClassNames(singletonList(MQTTIncomingInterceptor.class.getName())).setOutgoingInterceptorClassNames(singletonList(MQTTOutoingInterceptor.class.getName()));
        AddressSettings addressSettings = new AddressSettings();
        addressSettings.setDeadLetterAddress(SimpleString.toSimpleString("DLA"));
        addressSettings.setExpiryAddress(SimpleString.toSimpleString("EXPIRY"));
        defaultConfig.getAddressesSettings().put("#", addressSettings);
        return createServer(true, defaultConfig);
    }

    protected void addCoreConnector() throws Exception {
        // Overrides of this method can add additional configuration options or add multiple
        // MQTT transport connectors as needed, the port variable is always supposed to be
        // assigned the primary MQTT connector's port.
        Map<String, Object> params = new HashMap<>();
        params.put(TransportConstants.PORT_PROP_NAME, "" + 5445);
        params.put(TransportConstants.PROTOCOLS_PROP_NAME, "CORE");
        TransportConfiguration transportConfiguration = new TransportConfiguration(NETTY_ACCEPTOR_FACTORY, params);
        server.getConfiguration().getAcceptorConfigurations().add(transportConfiguration);
        LOG.info("Added connector {} to broker", getProtocolScheme());
    }

    protected void addMQTTConnector() throws Exception {
        // Overrides of this method can add additional configuration options or add multiple
        // MQTT transport connectors as needed, the port variable is always supposed to be
        // assigned the primary MQTT connector's port.
        Map<String, Object> params = new HashMap<>();
        params.put(TransportConstants.PORT_PROP_NAME, "" + port);
        params.put(TransportConstants.PROTOCOLS_PROP_NAME, "MQTT");
        TransportConfiguration transportConfiguration = new TransportConfiguration(NETTY_ACCEPTOR_FACTORY, params);
        server.getConfiguration().getAcceptorConfigurations().add(transportConfiguration);
        LOG.info("Added connector {} to broker", getProtocolScheme());
    }

    /** Stops the embedded broker if it is running; safe to call after a failed startup. */
    public void stopBroker() throws Exception {
        // FIX: tearDown() always calls this, even when startBroker() failed before the
        // server instance was created — the previous unguarded server.isStarted() call
        // threw an NPE that masked the original startup failure.
        if (server != null && server.isStarted()) {
            server.stop();
            server = null;
        }
    }

    protected String getQueueName() {
        return getClass().getName() + "." + name.getMethodName();
    }

    protected String getTopicName() {
        return getClass().getName() + "." + name.getMethodName();
    }

    /**
     * Initialize an MQTTClientProvider instance. By default this method uses the port that's
     * assigned to be the TCP based port using the base version of addMQTTConnector. A subclass
     * can either change the value of port or override this method to assign the correct port.
     *
     * @param provider the MQTTClientProvider instance to initialize.
     * @throws Exception if an error occurs during initialization.
     */
    protected void initializeConnection(MQTTClientProvider provider) throws Exception {
        if (!isUseSSL()) {
            provider.connect("tcp://localhost:" + port);
        } else {
            SSLContext ctx = SSLContext.getInstance("TLS");
            ctx.init(new KeyManager[0], new TrustManager[]{new DefaultTrustManager()}, new SecureRandom());
            provider.setSslContext(ctx);
            provider.connect("ssl://localhost:" + port);
        }
    }

    public String getProtocolScheme() {
        return protocolScheme;
    }

    public void setProtocolScheme(String scheme) {
        this.protocolScheme = scheme;
    }

    public boolean isUseSSL() {
        return this.useSSL;
    }

    public void setUseSSL(boolean useSSL) {
        this.useSSL = useSSL;
    }

    public boolean isPersistent() {
        return persistent;
    }

    public int getPort() {
        return this.port;
    }

    public boolean isSchedulerSupportEnabled() {
        return false;
    }

    protected interface Task {
        void run() throws Exception;
    }

    /**
     * Retries {@code task} until it succeeds or the deadline passes; on timeout the
     * last failure is rethrown (wrapped in RuntimeException unless already unchecked).
     */
    protected void within(int time, TimeUnit unit, Task task) throws InterruptedException {
        long timeMS = unit.toMillis(time);
        long deadline = System.currentTimeMillis() + timeMS;
        while (true) {
            try {
                task.run();
                return;
            } catch (Throwable e) {
                long remaining = deadline - System.currentTimeMillis();
                if (remaining <= 0) {
                    if (e instanceof RuntimeException) {
                        throw (RuntimeException) e;
                    }
                    if (e instanceof Error) {
                        throw (Error) e;
                    }
                    throw new RuntimeException(e);
                }
                Thread.sleep(Math.min(timeMS / 10, remaining));
            }
        }
    }

    protected MQTTClientProvider getMQTTClientProvider() {
        return new FuseMQTTClientProvider();
    }

    /** @return an MQTT 3.1.1 client with default (generated) client ID and session. */
    protected MQTT createMQTTConnection() throws Exception {
        MQTT client = createMQTTConnection(null, false);
        client.setVersion("3.1.1");
        return client;
    }

    protected MQTT createMQTTConnection(String clientId, boolean clean) throws Exception {
        if (isUseSSL()) {
            return createMQTTSslConnection(clientId, clean);
        } else {
            return createMQTTTcpConnection(clientId, clean);
        }
    }

    private MQTT createMQTTTcpConnection(String clientId, boolean clean) throws Exception {
        MQTT mqtt = new MQTT();
        mqtt.setConnectAttemptsMax(1);
        mqtt.setReconnectAttemptsMax(0);
        mqtt.setTracer(createTracer());
        mqtt.setVersion("3.1.1");
        if (clientId != null) {
            mqtt.setClientId(clientId);
        }
        mqtt.setCleanSession(clean);
        mqtt.setHost("localhost", port);
        return mqtt;
    }

    private MQTT createMQTTSslConnection(String clientId, boolean clean) throws Exception {
        MQTT mqtt = new MQTT();
        mqtt.setConnectAttemptsMax(1);
        mqtt.setReconnectAttemptsMax(0);
        mqtt.setTracer(createTracer());
        mqtt.setHost("ssl://localhost:" + port);
        if (clientId != null) {
            mqtt.setClientId(clientId);
        }
        mqtt.setCleanSession(clean);
        // Trust-all context so tests work against the self-signed test keystore.
        SSLContext ctx = SSLContext.getInstance("TLS");
        ctx.init(new KeyManager[0], new TrustManager[]{new DefaultTrustManager()}, new SecureRandom());
        mqtt.setSslContext(ctx);
        return mqtt;
    }

    /** Tracer that logs every sent/received MQTT frame for debugging. */
    protected Tracer createTracer() {
        return new Tracer() {
            @Override
            public void onReceive(MQTTFrame frame) {
                LOG.info("Client Received:\n" + frame);
            }
            @Override
            public void onSend(MQTTFrame frame) {
                LOG.info("Client Sent:\n" + frame);
            }
            @Override
            public void debug(String message, Object... args) {
                LOG.info(String.format(message, args));
            }
        };
    }

    // Accepts any certificate — test use only, never for production code.
    static class DefaultTrustManager implements X509TrustManager {
        @Override
        public void checkClientTrusted(X509Certificate[] x509Certificates, String s) throws CertificateException {
        }
        @Override
        public void checkServerTrusted(X509Certificate[] x509Certificates, String s) throws CertificateException {
        }
        @Override
        public X509Certificate[] getAcceptedIssuers() {
            return new X509Certificate[0];
        }
    }

    /** Counts inbound MQTT packets seen by the broker. */
    public static class MQTTIncomingInterceptor implements MQTTInterceptor {
        private static int messageCount = 0;
        @Override
        public boolean intercept(MqttMessage packet, RemotingConnection connection) throws ActiveMQException {
            messageCount++;
            return true;
        }
        public static void clear() {
            messageCount = 0;
        }
        public static int getMessageCount() {
            return messageCount;
        }
    }

    /** Counts outbound MQTT packets seen by the broker. (Name typo kept for compatibility.) */
    public static class MQTTOutoingInterceptor implements MQTTInterceptor {
        private static int messageCount = 0;
        @Override
        public boolean intercept(MqttMessage packet, RemotingConnection connection) throws ActiveMQException {
            messageCount++;
            return true;
        }
        public static void clear() {
            messageCount = 0;
        }
        public static int getMessageCount() {
            return messageCount;
        }
    }
}
| |
package alec_wam.CrystalMod.tiles.pipes.estorage.panel.monitor;
import java.io.IOException;
import java.util.List;
import com.google.common.collect.Lists;
import alec_wam.CrystalMod.api.estorage.security.NetworkAbility;
import alec_wam.CrystalMod.blocks.ModBlocks;
import alec_wam.CrystalMod.network.CrystalModNetwork;
import alec_wam.CrystalMod.tiles.pipes.estorage.EStorageNetworkClient;
import alec_wam.CrystalMod.tiles.pipes.estorage.PacketEStorageAddItem;
import alec_wam.CrystalMod.tiles.pipes.estorage.client.IGuiScreen;
import alec_wam.CrystalMod.tiles.pipes.estorage.panel.BlockPanel.PanelType;
import alec_wam.CrystalMod.util.ChatUtil;
import alec_wam.CrystalMod.util.Lang;
import alec_wam.CrystalMod.util.client.GuiUtil;
import alec_wam.CrystalMod.util.client.Scrollbar;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.renderer.RenderHelper;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.text.TextFormatting;
public class GuiPanelMonitor extends GuiContainer implements IGuiScreen {
public static final int VISIBLE_ROWS = 3;
public static final int ITEM_WIDTH = 72;
public static final int ITEM_HEIGHT = 30;
private TileEntityPanelMonitor craftingMonitor;
private GuiButton cancelButton;
private GuiButton cancelAllButton;
private int itemSelected = -1;
private boolean renderItemSelection;
private int renderItemSelectionX;
private int renderItemSelectionY;
private Scrollbar scrollbar = new Scrollbar(157, 20, 12, 89);
/**
 * Builds the crafting-monitor GUI for the given player's container.
 *
 * @param player          viewing player
 * @param craftingMonitor tile entity whose crafting tasks are displayed
 */
public GuiPanelMonitor(EntityPlayer player, TileEntityPanelMonitor craftingMonitor) {
    super(new ContainerPanelMonitor(player, craftingMonitor));
    // NOTE(review): xSize/ySize are the container dimensions; also overwriting
    // this.width/this.height (normally the full screen size, set by
    // setWorldAndResolution) looks suspicious — confirm this is intentional.
    xSize = this.width = 176;
    ySize = this.height = 230;
    this.craftingMonitor = craftingMonitor;
}
/**
 * Creates the "cancel" and "cancel all" buttons, sized to their localized
 * labels plus 14px padding, positioned on one row below the task grid.
 */
@Override
public void initGui() {
    super.initGui();
    // NOTE(review): one label uses Lang.localize(key, false), the other the
    // single-arg overload — confirm both overloads resolve the same way.
    String cancel = Lang.localize("gui.cancel", false);
    String cancelAll = Lang.localize("gui.cancel_all");
    int cancelButtonWidth = 14 + fontRendererObj.getStringWidth(cancel);
    int cancelAllButtonWidth = 14 + fontRendererObj.getStringWidth(cancelAll);
    cancelButton = new GuiButton(0, guiLeft + 7, guiTop + 113, cancelButtonWidth, 20, cancel);
    cancelAllButton = new GuiButton(1, guiLeft + 7 + cancelButtonWidth + 4, guiTop + 113, cancelAllButtonWidth, 20, cancelAll);
    buttonList.add(cancelButton);
    buttonList.add(cancelAllButton);
}
/**
 * Renders the screen and feeds the scrollbar the mouse position in
 * GUI-local coordinates (relative to guiLeft/guiTop).
 */
@Override
public void drawScreen(int mouseX, int mouseY, float partialTicks) {
    super.drawScreen(mouseX, mouseY, partialTicks);
    scrollbar.update(mouseX - guiLeft, mouseY - guiTop);
}
/**
 * Per-tick state update: enables scrolling only when there are more rows
 * than fit on screen, clears a stale selection when tasks shrink, and
 * enables/disables the cancel buttons accordingly.
 */
@Override
public void updateScreen() {
    scrollbar.setCanScroll(getRows() > VISIBLE_ROWS);
    scrollbar.setScrollDelta((float) scrollbar.getScrollbarHeight() / (float) getRows());
    // Selection may point past the end after a task completes/cancels.
    if (itemSelected >= craftingMonitor.getTasks().size()) {
        itemSelected = -1;
    }
    cancelButton.enabled = itemSelected != -1;
    cancelAllButton.enabled = craftingMonitor.getTasks().size() > 0;
}
// Background texture; the selection highlight sprite lives at u=178, v=0 in the same sheet.
ResourceLocation TEXTURE = new ResourceLocation("crystalmod:textures/gui/eStorage_crafting_monitor.png");

/**
 * Draws the GUI background, the highlight rectangle over the selected task
 * (coordinates computed by the foreground layer on the previous frame),
 * and the scrollbar.
 */
@Override
protected void drawGuiContainerBackgroundLayer(float renderPartialTicks, int mouseX, int mouseY) {
    GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);
    mc.getTextureManager().bindTexture(TEXTURE);
    drawTexturedModalRect(guiLeft, guiTop, 0, 0, xSize, ySize);
    if (renderItemSelection) {
        drawTexturedModalRect(guiLeft + renderItemSelectionX, guiTop + renderItemSelectionY, 178, 0, ITEM_WIDTH, ITEM_HEIGHT);
    }
    scrollbar.draw(this);
}
/**
 * Draws the 2x3 grid of crafting tasks for the current scroll position:
 * task output item + scaled name, the selection highlight coordinates for
 * the background layer, and a tooltip with the hovered task's info lines.
 */
@Override
public void drawGuiContainerForegroundLayer(int mouseX, int mouseY) {
    GlStateManager.color(1.0F, 1.0F, 1.0F, 1.0F);
    GlStateManager.disableLighting();
    fontRendererObj.drawString(Lang.translateToLocal(ModBlocks.storagePanel.getUnlocalizedName() + "." + PanelType.MONITOR.getName()+".name"), 7, 7, 4210752);
    fontRendererObj.drawString(Lang.localize("container.inventory", false), 7, 137, 4210752);
    GlStateManager.enableLighting();
    int x = 8;
    int y = 20;
    // First visible task index: 2 tasks per row, offset rows scrolled off the top.
    int item = getOffset() * 2;
    RenderHelper.enableGUIStandardItemLighting();
    List<String> lines = Lists.newArrayList();
    renderItemSelection = false;
    for (int i = 0; i < 6; ++i) {
        if (item < craftingMonitor.getTasks().size()) {
            if (item == itemSelected) {
                renderItemSelection = true;
                renderItemSelectionX = x;
                renderItemSelectionY = y;
            }
            // FIX: index with the scroll-adjusted task index 'item', not the grid
            // slot 'i' — the bounds check above already uses 'item', and using 'i'
            // rendered the wrong tasks whenever the list was scrolled.
            TileEntityPanelMonitor.ClientSideCraftingTask task = craftingMonitor.getTasks().get(item);
            zLevel = 200.0F;
            itemRender.zLevel = 200.0F;
            itemRender.renderItemIntoGUI(task.output, x, y);
            zLevel = 0.0F;
            itemRender.zLevel = 0.0F;
            // Task name drawn at half scale to fit next to the 16px item icon.
            float scale = 0.5f;
            GlStateManager.pushMatrix();
            GlStateManager.scale(scale, scale, 1);
            int textX = GuiUtil.calculateOffsetOnScale(x + 20, scale);
            fontRendererObj.drawString(task.output.getDisplayName(), textX, GuiUtil.calculateOffsetOnScale(y + 4, scale), 4210752);
            GlStateManager.popMatrix();
            if (GuiUtil.inBounds(x, y, 16, 16, mouseX-guiLeft, mouseY-guiTop)) {
                if(!task.info.trim().equals("")){
                    // Tooltip lines are newline-separated, with "T=" entries holding
                    // '&'-joined localization keys (2-char prefix + key) and "I="
                    // entries holding a single key rendered in yellow.
                    String[] preFix = task.info.split("\n");
                    for (int j = 0; j < preFix.length; ++j) {
                        String line = preFix[j];
                        if (line.startsWith("T=")) {
                            String data = line.substring(2);
                            String[] items = data.split("&");
                            for(String itemS : items){
                                lines.add(itemS.substring(0, 2)+Lang.localize(itemS.substring(2), false));
                            }
                        } else if (line.startsWith("I=")) {
                            line = TextFormatting.YELLOW + Lang.localize(line.substring(2), false);
                            lines.add(line);
                        }
                    }
                }else {
                    lines.add("Empty Info");
                }
            }
        }
        // Grid layout: slots 0/1 on row 1, 2/3 on row 2, 4/5 on row 3.
        if (i == 1 || i == 3) {
            x = 8;
            y += ITEM_HEIGHT;
        } else {
            x += ITEM_WIDTH;
        }
        item++;
    }
    // FIX: 'lines' is never null (Lists.newArrayList()); checking emptiness avoids
    // drawing an empty tooltip box every frame.
    if (!lines.isEmpty()) {
        drawHoveringText(lines, mouseX-guiLeft, mouseY-guiTop);
    }
}
public int getOffset() {
    // Converts the scrollbar position into a row offset into the task list.
    // NOTE(review): 89f presumably is the scroll track height in pixels —
    // confirm against the Scrollbar implementation before changing.
    return (int) (scrollbar.getCurrentScroll() / 89f * getRows());
}
private int getRows() {
    // Two tasks are shown per row, so the row count is ceil(size / 2).
    // Integer ceiling division replaces the old float Math.ceil() round-trip,
    // and the former "max < 0" guard was dead code: List.size() is never
    // negative, so ceil(size/2) could never be negative either.
    return (craftingMonitor.getTasks().size() + 1) / 2;
}
@Override
protected void actionPerformed(GuiButton button) throws IOException {
    super.actionPerformed(button);
    // Permission gate: when a client-side network object is available, the
    // player must hold the SETTINGS ability to cancel crafting tasks.
    boolean canChangeSettings = true;
    boolean safe = craftingMonitor.getNetwork() !=null && craftingMonitor.getNetwork() instanceof EStorageNetworkClient;
    if(safe){
        if(!craftingMonitor.getNetwork().hasAbility(mc.player, NetworkAbility.SETTINGS)){
            canChangeSettings = false;
        }
    }
    if(!canChangeSettings){
        // Tell the player why nothing happened (rate-limited chat message).
        ChatUtil.sendNoSpam(mc.player, Lang.localize("gui.networkability."+NetworkAbility.SETTINGS.getId()));
        return;
    }
    // NOTE(review): "5" appears to be the cancel-task packet id and "-1" the
    // cancel-all sentinel — confirm against PacketEStorageAddItem's handler.
    if (button == cancelButton && itemSelected != -1) {
        CrystalModNetwork.sendToServer(new PacketEStorageAddItem(5, craftingMonitor.getTasks().get(itemSelected).id, 0, new byte[0]));
    } else if (button == cancelAllButton && craftingMonitor.getTasks().size() > 0) {
        CrystalModNetwork.sendToServer(new PacketEStorageAddItem(5, -1, 0, new byte[0]));
    }
}
@Override
protected void mouseClicked(int mouseX, int mouseY, int mouseButton) throws IOException {
    super.mouseClicked(mouseX, mouseY, mouseButton);
    // Only a left click (button 0) inside the 2x3 task grid changes the selection.
    int relX = mouseX - guiLeft;
    int relY = mouseY - guiTop;
    if (mouseButton != 0 || !GuiUtil.inBounds(8, 20, 144, 90, relX, relY)) {
        return;
    }
    // Clear the selection, then re-select whichever slot the click landed in.
    itemSelected = -1;
    int taskIndex = getOffset() * 2; // first task visible at the current scroll position
    for (int row = 0; row < 3; ++row) {
        for (int col = 0; col < 2; ++col) {
            int slotX = 8 + (col * ITEM_WIDTH);
            int slotY = 20 + (row * ITEM_HEIGHT);
            boolean hit = GuiUtil.inBounds(slotX, slotY, ITEM_WIDTH, ITEM_HEIGHT, relX, relY);
            if (hit && taskIndex < craftingMonitor.getTasks().size()) {
                itemSelected = taskIndex;
            }
            taskIndex++;
        }
    }
}
@Override
public int getGuiLeft() {
    // Exposes the GUI's left edge for collaborators such as the scrollbar.
    return guiLeft;
}
@Override
public int getGuiTop() {
    // Exposes the GUI's top edge for collaborators such as the scrollbar.
    return guiTop;
}
}
| |
package fi.rivermouth.talous.controller.api;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;
import fi.rivermouth.spring.controller.BaseController;
import fi.rivermouth.spring.controller.CRUDController;
import fi.rivermouth.spring.controller.Method;
import fi.rivermouth.spring.entity.Response;
import fi.rivermouth.spring.service.BaseService;
import fi.rivermouth.talous.domain.File;
import fi.rivermouth.talous.service.FileService;
import fi.rivermouth.talous.service.UserService;
@RestController
@RequestMapping("/api/files/{ownerId}")
public class ApiFileController extends BaseController<File, Long> {
protected static final String PARENT_NOT_FOUND_WITH_ID_S = "Parent not found with id %s.";
@Autowired
private FileService fileService;
@Autowired
private UserService userService;
protected Response parentNotFoundWithIdResponse(Long ownerId) {
return new Response(HttpStatus.BAD_REQUEST, new Response.ErrorMessage(PARENT_NOT_FOUND_WITH_ID_S, ownerId));
}
/**
* PUT: /{ownerId}/{collection}/{parentId}
* success: {@value HttpStatus#CREATED}
* error : {@value HttpStatus#CONFLICT}
* parent not found: {@value HttpStatus#BAD_REQUEST}
*
* Create entity and attach it to parent
* @param entity
* @return
*/
@RequestMapping(value = "/{collection}/{parentId}", method = RequestMethod.PUT, consumes = "application/json")
public Response createWjson(@PathVariable("ownerId") Long ownerId, @PathVariable("collection") String collection,
@PathVariable("parentId") Long parentId, @Valid @RequestBody File entity) {
return _create(ownerId, collection, parentId, entity);
}
@RequestMapping(value = "/{collection}/{parentId}", method = RequestMethod.POST,
consumes = "application/x-www-form-urlencoded")
public Response create(@PathVariable("ownerId") Long ownerId, @PathVariable("collection") String collection,
@PathVariable("parentId") Long parentId, @Valid @ModelAttribute File entity) {
return _create(ownerId, collection, parentId, entity);
}
@RequestMapping(value = "/{collection}/{parentId}", method = RequestMethod.POST, consumes = "multipart/form-data")
public Response createWform(@PathVariable("ownerId") Long ownerId, @PathVariable("collection") String collection,
@PathVariable("parentId") Long parentId, @RequestParam("name") String name,
@RequestParam("content") MultipartFile content,
@RequestParam(value = "mimeType", required = false) String mimeType) throws IOException {
return _create(ownerId, collection, parentId, name, content, mimeType);
}
/**
* PUT: /{ownerId}/{collection}
* success: {@value HttpStatus#CREATED}
* error : {@value HttpStatus#CONFLICT}
* parent not found: {@value HttpStatus#BAD_REQUEST}
*
* Create entity and attach it to owner
* @param entity
* @return
*/
@RequestMapping(value = "/{collection}/root", method = RequestMethod.PUT, consumes = "application/json")
public Response createWjsonToRoot(@PathVariable("ownerId") Long ownerId,
@PathVariable("collection") String collection, @Valid @RequestBody File entity) {
return _create(ownerId, collection, ownerId, entity);
}
@RequestMapping(value = "/{collection}/root", method = RequestMethod.POST,
consumes = "application/x-www-form-urlencoded")
public Response createToRoot(@PathVariable("ownerId") Long ownerId, @PathVariable("collection") String collection,
@Valid @ModelAttribute File entity) {
return _create(ownerId, collection, ownerId, entity);
}
@RequestMapping(value = "/{collection}/root", method = RequestMethod.POST, consumes = "multipart/form-data")
public Response createWformToRoot(@PathVariable("ownerId") Long ownerId,
@PathVariable("collection") String collection, @RequestParam("name") String name,
@RequestParam("content") MultipartFile content,
@RequestParam(value = "mimeType", required = false) String mimeType) throws IOException {
return _create(ownerId, collection, ownerId, name, content, mimeType);
}
private Response _create(Long ownerId, String collection, Long parentId, String name, MultipartFile file,
String mimeType) throws IOException {
File entity = new File(name, file.getBytes());
if (mimeType == null)
entity.setMimeType(file.getContentType());
return _create(ownerId, collection, parentId, entity);
}
private Response _create(Long ownerId, String collection, Long parentId, File entity) {
checkAuthorization(Method.CREATE, null, ownerId);
if (!userService.exists(ownerId))
return parentNotFoundWithIdResponse(ownerId);
entity.setCollection(collection);
entity.setOwner(ownerId);
entity.setAttachedTo(parentId);
Response response = super.create(entity);
return response;
}
/**
* POST: /{ownerId}/{id}
* success: {@value HttpStatus#OK}
* error : {@value HttpStatus#NOT_MODIFIED}
* parent mismatch : {@value HttpStatus#BAD_REQUEST}
* parent not found : {@value HttpStatus#BAD_REQUEST}
* id does not match: {@value HttpStatus#BAD_REQUEST}
*
* Update entity
* @param id
* @param entity
* @return
*/
@RequestMapping(value = "/{id}", method = RequestMethod.POST, consumes = "application/json")
public Response updateWjson(@PathVariable("ownerId") Long ownerId, @PathVariable("id") Long id,
@Valid @RequestBody File entity) {
return _update(ownerId, id, entity);
}
@RequestMapping(value = "/{id}", method = RequestMethod.POST, consumes = "application/x-www-form-urlencoded")
public Response update(@PathVariable("ownerId") Long ownerId, @PathVariable("id") Long id,
@Valid @ModelAttribute File entity) {
return _update(ownerId, id, entity);
}
private Response _update(Long ownerId, Long id, File entity) {
checkAuthorization(Method.UPDATE, id, ownerId);
if (!userService.exists(ownerId))
return parentNotFoundWithIdResponse(ownerId);
return super.update(id, entity);
}
/**
* GET: /{ownerId}/{id}/info
* success: {@value HttpStatus#OK}
* error : {@value HttpStatus#NOT_FOUND}
* parent not found: {@value HttpStatus#BAD_REQUEST}
*
* Get entity
* @param id
* @return
*/
@RequestMapping(value = "/{id}/info", method = RequestMethod.GET)
public Response getInfo(@PathVariable("ownerId") Long ownerId, @PathVariable("id") Long id) {
checkAuthorization(Method.GET, id, ownerId);
if (!userService.exists(ownerId))
return parentNotFoundWithIdResponse(ownerId);
return super.get(id);
}
/**
* GET: /{ownerId}/{id}
* success: {@value HttpStatus#OK}
* error : {@value HttpStatus#NOT_FOUND}
* parent not found: {@value HttpStatus#BAD_REQUEST}
*
* Get entity, produces file
* @param id
* @return
*/
@RequestMapping(value = "/{id}", method = RequestMethod.GET)
public ResponseEntity<byte[]> get(@PathVariable("ownerId") Long ownerId, @PathVariable("id") Long id) {
checkAuthorization(Method.GET, id, ownerId);
File file = fileService.get(id);
final HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.parseMediaType(file.getMimeType()));
headers.setContentLength(file.getSize());
headers.add("Content-Disposition", "attachment; filename=" + file.getName());
return new ResponseEntity<byte[]>(file.getContent(), headers, HttpStatus.OK);
}
/**
* GET: /{ownerId}/{collection}/{parentId}
* success: {@value HttpStatus#OK}
* error :
* parent not found: {@value HttpStatus#BAD_REQUEST}
*
* List entities by collection
* @return
*/
@RequestMapping(value = "/{collection}/all", method = RequestMethod.GET)
public Response list(@PathVariable("ownerId") Long ownerId, @PathVariable("collection") String collection) {
return _list(ownerId, collection, null);
}
@RequestMapping(value = "/{collection}/{parentId}", method = RequestMethod.GET)
public Response listByParentId(@PathVariable("ownerId") Long ownerId,
@PathVariable("collection") String collection, @PathVariable("parentId") Long parentId) {
return _list(ownerId, collection, parentId);
}
private Response _list(Long ownerId, String collection, Long parentId) {
checkAuthorization(Method.LIST, null, ownerId);
if (!userService.exists(ownerId)) {
return parentNotFoundWithIdResponse(ownerId);
}
return listResponse(fileService.list(ownerId, collection, parentId));
}
/**
* DELETE: /{ownerId}/{id}
* success: {@value HttpStatus#OK}
* error : {@value HttpStatus#NOT_FOUND}
* parent not found: {@value HttpStatus#BAD_REQUEST}
*
* Delete entity
* @param entity
* @return
*/
@RequestMapping(value = "/{id}", method = RequestMethod.DELETE)
public Response delete(@PathVariable("ownerId") Long ownerId, @PathVariable("id") Long id) {
checkAuthorization(Method.DELETE, id, ownerId);
if (!userService.exists(ownerId))
return parentNotFoundWithIdResponse(ownerId);
if (!getService().exists(id))
return notFoundWithIdResponse(id);
return super.delete(id);
}
@Override
public BaseService<File, Long> getService() {
return fileService;
}
@Override
public String getEntityKind() {
return "file";
}
@Override
protected <S extends Serializable> boolean isAuthorized(Method method, Long id, S ownerId) {
return userService.isAuthenticatedUserId((Long) ownerId);
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.io.tar;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
/** @deprecated use bundled commons-compress library (to be removed in IDEA 17) */
class TarBuffer {
    /** Default record size */
    public static final int DEFAULT_RCDSIZE = (512);
    /** Default block size */
    public static final int DEFAULT_BLKSIZE = (DEFAULT_RCDSIZE * 20);
    // Exactly one of inStream/outStream is non-null: a TarBuffer instance is
    // either a read buffer or a write buffer, never both.
    private InputStream inStream;
    private OutputStream outStream;
    // Holds one block's worth of raw data; records are copied in/out of here.
    private byte[] blockBuffer;
    // Zero-based index of the block currently held in blockBuffer.
    private int currBlkIdx;
    // Index of the next record to read/write within the current block.
    private int currRecIdx;
    private int blockSize;
    private int recordSize;
    // blockSize / recordSize, computed once in initialize().
    private int recsPerBlock;
    // When true, progress messages are printed to System.err.
    private boolean debug;
    /**
     * Constructor for a TarBuffer on an input stream.
     * @param inStream the input stream to use
     */
    public TarBuffer(InputStream inStream) {
        this(inStream, TarBuffer.DEFAULT_BLKSIZE);
    }
    /**
     * Constructor for a TarBuffer on an input stream.
     * @param inStream the input stream to use
     * @param blockSize the block size to use
     */
    public TarBuffer(InputStream inStream, int blockSize) {
        this(inStream, blockSize, TarBuffer.DEFAULT_RCDSIZE);
    }
    /**
     * Constructor for a TarBuffer on an input stream.
     * @param inStream the input stream to use
     * @param blockSize the block size to use
     * @param recordSize the record size to use
     */
    public TarBuffer(InputStream inStream, int blockSize, int recordSize) {
        this.inStream = inStream;
        this.outStream = null;
        this.initialize(blockSize, recordSize);
    }
    /**
     * Constructor for a TarBuffer on an output stream.
     * @param outStream the output stream to use
     */
    public TarBuffer(OutputStream outStream) {
        this(outStream, TarBuffer.DEFAULT_BLKSIZE);
    }
    /**
     * Constructor for a TarBuffer on an output stream.
     * @param outStream the output stream to use
     * @param blockSize the block size to use
     */
    public TarBuffer(OutputStream outStream, int blockSize) {
        this(outStream, blockSize, TarBuffer.DEFAULT_RCDSIZE);
    }
    /**
     * Constructor for a TarBuffer on an output stream.
     * @param outStream the output stream to use
     * @param blockSize the block size to use
     * @param recordSize the record size to use
     */
    public TarBuffer(OutputStream outStream, int blockSize, int recordSize) {
        this.inStream = null;
        this.outStream = outStream;
        this.initialize(blockSize, recordSize);
    }
    /**
     * Initialization common to all constructors.
     */
    private void initialize(int blockSize, int recordSize) {
        this.debug = false;
        this.blockSize = blockSize;
        this.recordSize = recordSize;
        this.recsPerBlock = (this.blockSize / this.recordSize);
        this.blockBuffer = new byte[this.blockSize];
        if (this.inStream != null) {
            // Start "past the end" of a non-existent block so that the first
            // read forces readBlock() to fetch real data.
            this.currBlkIdx = -1;
            this.currRecIdx = this.recsPerBlock;
        } else {
            this.currBlkIdx = 0;
            this.currRecIdx = 0;
        }
    }
    /**
     * Get the TAR Buffer's block size. Blocks consist of multiple records.
     * @return the block size
     */
    public int getBlockSize() {
        return this.blockSize;
    }
    /**
     * Get the TAR Buffer's record size.
     * @return the record size
     */
    public int getRecordSize() {
        return this.recordSize;
    }
    /**
     * Set the debugging flag for the buffer.
     *
     * @param debug If true, print debugging output.
     */
    public void setDebug(boolean debug) {
        this.debug = debug;
    }
    /**
     * Determine if an archive record indicate End of Archive. End of
     * archive is indicated by a record that consists entirely of null bytes.
     *
     * @param record The record data to check.
     * @return true if the record data is an End of Archive
     */
    public boolean isEOFRecord(byte[] record) {
        for (int i = 0, sz = getRecordSize(); i < sz; ++i) {
            if (record[i] != 0) {
                return false;
            }
        }
        return true;
    }
    /**
     * Skip over a record on the input stream.
     * @throws java.io.IOException on error
     */
    public void skipRecord() throws IOException {
        if (debug) {
            System.err.println("SkipRecord: recIdx = " + currRecIdx
                    + " blkIdx = " + currBlkIdx);
        }
        if (inStream == null) {
            throw new IOException("reading (via skip) from an output buffer");
        }
        if (currRecIdx >= recsPerBlock) {
            if (!readBlock()) {
                return; // UNDONE
            }
        }
        currRecIdx++;
    }
    /**
     * Read a record from the input stream and return the data.
     *
     * @return The record data, or null at end of stream.
     * @throws IOException on error
     */
    public byte[] readRecord() throws IOException {
        if (debug) {
            System.err.println("ReadRecord: recIdx = " + currRecIdx
                    + " blkIdx = " + currBlkIdx);
        }
        if (inStream == null) {
            throw new IOException("reading from an output buffer");
        }
        // Refill the block buffer when the current block is exhausted.
        if (currRecIdx >= recsPerBlock) {
            if (!readBlock()) {
                return null;
            }
        }
        byte[] result = new byte[recordSize];
        System.arraycopy(blockBuffer,
                (currRecIdx * recordSize), result, 0,
                recordSize);
        currRecIdx++;
        return result;
    }
    /**
     * @return false if End-Of-File, else true
     */
    private boolean readBlock() throws IOException {
        if (debug) {
            System.err.println("ReadBlock: blkIdx = " + currBlkIdx);
        }
        if (inStream == null) {
            throw new IOException("reading from an output buffer");
        }
        currRecIdx = 0;
        int offset = 0;
        int bytesNeeded = blockSize;
        // A single read() may return fewer bytes than a block; loop until the
        // block buffer is full or the stream ends.
        while (bytesNeeded > 0) {
            long numBytes = inStream.read(blockBuffer, offset,
                    bytesNeeded);
            //
            // NOTE
            // We have fit EOF, and the block is not full!
            //
            // This is a broken archive. It does not follow the standard
            // blocking algorithm. However, because we are generous, and
            // it requires little effort, we will simply ignore the error
            // and continue as if the entire block were read. This does
            // not appear to break anything upstream. We used to return
            // false in this case.
            //
            // Thanks to 'Yohann.Roussel@alcatel.fr' for this fix.
            //
            if (numBytes == -1) {
                if (offset == 0) {
                    // Ensure that we do not read gigabytes of zeros
                    // for a corrupt tar file.
                    // See http://issues.apache.org/bugzilla/show_bug.cgi?id=39924
                    return false;
                }
                // However, just leaving the unread portion of the buffer dirty does
                // cause problems in some cases. This problem is described in
                // http://issues.apache.org/bugzilla/show_bug.cgi?id=29877
                //
                // The solution is to fill the unused portion of the buffer with zeros.
                Arrays.fill(blockBuffer, offset, offset + bytesNeeded, (byte)0);
                break;
            }
            offset += numBytes;
            bytesNeeded -= numBytes;
            if (numBytes != blockSize) {
                if (debug) {
                    System.err.println("ReadBlock: INCOMPLETE READ "
                            + numBytes + " of " + blockSize
                            + " bytes read.");
                }
            }
        }
        currBlkIdx++;
        return true;
    }
    /**
     * Get the current block number, zero based.
     *
     * @return The current zero based block number.
     */
    public int getCurrentBlockNum() {
        return currBlkIdx;
    }
    /**
     * Get the current record number, within the current block, zero based.
     * Thus, current offset = (currentBlockNum * recsPerBlk) + currentRecNum.
     *
     * @return The current zero based record number.
     */
    public int getCurrentRecordNum() {
        // currRecIdx points at the NEXT record, hence the -1.
        return currRecIdx - 1;
    }
    /**
     * Write an archive record to the archive.
     *
     * @param record The record data to write to the archive.
     * @throws IOException on error
     */
    public void writeRecord(byte[] record) throws IOException {
        if (debug) {
            System.err.println("WriteRecord: recIdx = " + currRecIdx
                    + " blkIdx = " + currBlkIdx);
        }
        if (outStream == null) {
            throw new IOException("writing to an input buffer");
        }
        if (record.length != recordSize) {
            throw new IOException("record to write has length '"
                    + record.length
                    + "' which is not the record size of '"
                    + recordSize + "'");
        }
        // Flush the block first if it is full.
        if (currRecIdx >= recsPerBlock) {
            writeBlock();
        }
        System.arraycopy(record, 0, blockBuffer,
                (currRecIdx * recordSize),
                recordSize);
        currRecIdx++;
    }
    /**
     * Write an archive record to the archive, where the record may be
     * inside of a larger array buffer. The buffer must be "offset plus
     * record size" long.
     *
     * @param buf The buffer containing the record data to write.
     * @param offset The offset of the record data within buf.
     * @throws IOException on error
     */
    public void writeRecord(byte[] buf, int offset) throws IOException {
        if (debug) {
            System.err.println("WriteRecord: recIdx = " + currRecIdx
                    + " blkIdx = " + currBlkIdx);
        }
        if (outStream == null) {
            throw new IOException("writing to an input buffer");
        }
        if ((offset + recordSize) > buf.length) {
            throw new IOException("record has length '" + buf.length
                    + "' with offset '" + offset
                    + "' which is less than the record size of '"
                    + recordSize + "'");
        }
        if (currRecIdx >= recsPerBlock) {
            writeBlock();
        }
        System.arraycopy(buf, offset, blockBuffer,
                (currRecIdx * recordSize),
                recordSize);
        currRecIdx++;
    }
    /**
     * Write a TarBuffer block to the archive.
     */
    private void writeBlock() throws IOException {
        if (debug) {
            System.err.println("WriteBlock: blkIdx = " + currBlkIdx);
        }
        if (outStream == null) {
            throw new IOException("writing to an input buffer");
        }
        outStream.write(blockBuffer, 0, blockSize);
        outStream.flush();
        currRecIdx = 0;
        currBlkIdx++;
        // Zero the buffer so a partially-filled final block pads with zeros.
        Arrays.fill(blockBuffer, (byte) 0);
    }
    /**
     * Flush the current data block if it has any data in it.
     */
    void flushBlock() throws IOException {
        if (debug) {
            System.err.println("TarBuffer.flushBlock() called.");
        }
        if (outStream == null) {
            throw new IOException("writing to an input buffer");
        }
        if (currRecIdx > 0) {
            writeBlock();
        }
    }
    /**
     * Close the TarBuffer. If this is an output buffer, also flush the
     * current block before closing.
     * @throws IOException on error
     */
    public void close() throws IOException {
        if (debug) {
            System.err.println("TarBuffer.closeBuffer().");
        }
        if (outStream != null) {
            flushBlock();
            // Never close the process-wide standard streams.
            if (outStream != System.out
                    && outStream != System.err) {
                outStream.close();
                outStream = null;
            }
        } else if (inStream != null) {
            if (inStream != System.in) {
                inStream.close();
                inStream = null;
            }
        }
    }
}
| |
/*===========================================================================
* Licensed Materials - Property of IBM
* "Restricted Materials of IBM"
*
* IBM SDK, Java(tm) Technology Edition, v8
* (C) Copyright IBM Corp. 1998, 2013. All Rights Reserved
*
* US Government Users Restricted Rights - Use, duplication or disclosure
* restricted by GSA ADP Schedule Contract with IBM Corp.
*===========================================================================
*/
/*
* Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.util;
/**
* A {@link NavigableSet} implementation based on a {@link TreeMap}.
* The elements are ordered using their {@linkplain Comparable natural
* ordering}, or by a {@link Comparator} provided at set creation
* time, depending on which constructor is used.
*
* <p>This implementation provides guaranteed log(n) time cost for the basic
* operations ({@code add}, {@code remove} and {@code contains}).
*
* <p>Note that the ordering maintained by a set (whether or not an explicit
* comparator is provided) must be <i>consistent with equals</i> if it is to
* correctly implement the {@code Set} interface. (See {@code Comparable}
* or {@code Comparator} for a precise definition of <i>consistent with
* equals</i>.) This is so because the {@code Set} interface is defined in
* terms of the {@code equals} operation, but a {@code TreeSet} instance
* performs all element comparisons using its {@code compareTo} (or
* {@code compare}) method, so two elements that are deemed equal by this method
* are, from the standpoint of the set, equal. The behavior of a set
* <i>is</i> well-defined even if its ordering is inconsistent with equals; it
* just fails to obey the general contract of the {@code Set} interface.
*
* <p><strong>Note that this implementation is not synchronized.</strong>
* If multiple threads access a tree set concurrently, and at least one
* of the threads modifies the set, it <i>must</i> be synchronized
* externally. This is typically accomplished by synchronizing on some
* object that naturally encapsulates the set.
* If no such object exists, the set should be "wrapped" using the
* {@link Collections#synchronizedSortedSet Collections.synchronizedSortedSet}
* method. This is best done at creation time, to prevent accidental
* unsynchronized access to the set: <pre>
* SortedSet s = Collections.synchronizedSortedSet(new TreeSet(...));</pre>
*
* <p>The iterators returned by this class's {@code iterator} method are
* <i>fail-fast</i>: if the set is modified at any time after the iterator is
* created, in any way except through the iterator's own {@code remove}
* method, the iterator will throw a {@link ConcurrentModificationException}.
* Thus, in the face of concurrent modification, the iterator fails quickly
* and cleanly, rather than risking arbitrary, non-deterministic behavior at
* an undetermined time in the future.
*
* <p>Note that the fail-fast behavior of an iterator cannot be guaranteed
* as it is, generally speaking, impossible to make any hard guarantees in the
* presence of unsynchronized concurrent modification. Fail-fast iterators
* throw {@code ConcurrentModificationException} on a best-effort basis.
* Therefore, it would be wrong to write a program that depended on this
* exception for its correctness: <i>the fail-fast behavior of iterators
* should be used only to detect bugs.</i>
*
* <p>This class is a member of the
* <a href="{@docRoot}/../technotes/guides/collections/index.html">
* Java Collections Framework</a>.
*
* @param <E> the type of elements maintained by this set
*
* @author Josh Bloch
* @see Collection
* @see Set
* @see HashSet
* @see Comparable
* @see Comparator
* @see TreeMap
* @since 1.2
*/
public class TreeSet<E> extends AbstractSet<E>
implements NavigableSet<E>, Cloneable, java.io.Serializable
{
/**
 * The backing map.
 */
private transient NavigableMap<E,Object> m;

// Dummy value to associate with an Object in the backing Map.
// It is the only non-null value ever stored, so identity comparison
// against it (see remove) is safe.
private static final Object PRESENT = new Object();

/**
 * Constructs a set backed by the specified navigable map.
 * (Package-private: used by descendingSet and the subset views.)
 */
TreeSet(NavigableMap<E,Object> m) {
    this.m = m;
}
/**
 * Constructs a new, empty tree set, sorted according to the
 * natural ordering of its elements.  All elements inserted into
 * the set must implement the {@link Comparable} interface.
 * Furthermore, all such elements must be <i>mutually
 * comparable</i>: {@code e1.compareTo(e2)} must not throw a
 * {@code ClassCastException} for any elements {@code e1} and
 * {@code e2} in the set.  If the user attempts to add an element
 * to the set that violates this constraint (for example, the user
 * attempts to add a string element to a set whose elements are
 * integers), the {@code add} call will throw a
 * {@code ClassCastException}.
 */
public TreeSet() {
    // Delegates to the map-backed constructor with a fresh TreeMap.
    this(new TreeMap<E,Object>());
}
/**
 * Constructs a new, empty tree set, sorted according to the specified
 * comparator.  All elements inserted into the set must be <i>mutually
 * comparable</i> by the specified comparator: {@code comparator.compare(e1,
 * e2)} must not throw a {@code ClassCastException} for any elements
 * {@code e1} and {@code e2} in the set.  If the user attempts to add
 * an element to the set that violates this constraint, the
 * {@code add} call will throw a {@code ClassCastException}.
 *
 * @param comparator the comparator that will be used to order this set.
 *        If {@code null}, the {@linkplain Comparable natural
 *        ordering} of the elements will be used.
 */
public TreeSet(Comparator<? super E> comparator) {
    this(new TreeMap<>(comparator));
}
/**
 * Constructs a new tree set containing the elements in the specified
 * collection, sorted according to the <i>natural ordering</i> of its
 * elements.  All elements inserted into the set must implement the
 * {@link Comparable} interface.  Furthermore, all such elements must be
 * <i>mutually comparable</i>: {@code e1.compareTo(e2)} must not throw a
 * {@code ClassCastException} for any elements {@code e1} and
 * {@code e2} in the set.
 *
 * @param c collection whose elements will comprise the new set
 * @throws ClassCastException if the elements in {@code c} are
 *         not {@link Comparable}, or are not mutually comparable
 * @throws NullPointerException if the specified collection is null
 */
public TreeSet(Collection<? extends E> c) {
    this();
    addAll(c);
}
/**
 * Constructs a new tree set containing the same elements and
 * using the same ordering as the specified sorted set.
 *
 * @param s sorted set whose elements will comprise the new set
 * @throws NullPointerException if the specified sorted set is null
 */
public TreeSet(SortedSet<E> s) {
    // Reuse the source set's comparator so ordering is preserved.
    this(s.comparator());
    addAll(s);
}
/**
 * Returns an iterator over the elements in this set in ascending order.
 *
 * @return an iterator over the elements in this set in ascending order
 */
public Iterator<E> iterator() {
    // Iterates the backing map's key set; inherits its fail-fast behavior.
    return m.navigableKeySet().iterator();
}
/**
 * Returns an iterator over the elements in this set in descending order.
 *
 * @return an iterator over the elements in this set in descending order
 * @since 1.6
 */
public Iterator<E> descendingIterator() {
    return m.descendingKeySet().iterator();
}
/**
 * Returns a reverse-order view of this set, backed by the
 * descending view of the backing map.
 * @since 1.6
 */
public NavigableSet<E> descendingSet() {
    return new TreeSet<>(m.descendingMap());
}
/**
 * Returns the number of elements in this set (its cardinality).
 *
 * @return the number of elements in this set (its cardinality)
 */
public int size() {
    // All queries simply delegate to the backing map.
    return m.size();
}
/**
 * Returns {@code true} if this set contains no elements.
 *
 * @return {@code true} if this set contains no elements
 */
public boolean isEmpty() {
    return m.isEmpty();
}
/**
 * Returns {@code true} if this set contains the specified element.
 * More formally, returns {@code true} if and only if this set
 * contains an element {@code e} such that
 * <tt>(o==null ? e==null : o.equals(e))</tt>.
 *
 * @param o object to be checked for containment in this set
 * @return {@code true} if this set contains the specified element
 * @throws ClassCastException if the specified object cannot be compared
 *         with the elements currently in the set
 * @throws NullPointerException if the specified element is null
 *         and this set uses natural ordering, or its comparator
 *         does not permit null elements
 */
public boolean contains(Object o) {
    return m.containsKey(o);
}
/**
* Adds the specified element to this set if it is not already present.
* More formally, adds the specified element {@code e} to this set if
* the set contains no element {@code e2} such that
* <tt>(e==null ? e2==null : e.equals(e2))</tt>.
* If this set already contains the element, the call leaves the set
* unchanged and returns {@code false}.
*
* @param e element to be added to this set
* @return {@code true} if this set did not already contain the specified
* element
* @throws ClassCastException if the specified object cannot be compared
* with the elements currently in this set
* @throws NullPointerException if the specified element is null
* and this set uses natural ordering, or its comparator
* does not permit null elements
*/
public boolean add(E e) {
    // put() returns null iff the key was absent, i.e. iff this call inserted it.
    // PRESENT is the shared dummy value used for all entries.
    return m.put(e, PRESENT)==null;
}
/**
* Removes the specified element from this set if it is present.
* More formally, removes an element {@code e} such that
* <tt>(o==null ? e==null : o.equals(e))</tt>,
* if this set contains such an element. Returns {@code true} if
* this set contained the element (or equivalently, if this set
* changed as a result of the call). (This set will not contain the
* element once the call returns.)
*
* @param o object to be removed from this set, if present
* @return {@code true} if this set contained the specified element
* @throws ClassCastException if the specified object cannot be compared
* with the elements currently in this set
* @throws NullPointerException if the specified element is null
* and this set uses natural ordering, or its comparator
* does not permit null elements
*/
public boolean remove(Object o) {
    // remove() returns the stored value, which is the PRESENT sentinel iff the
    // key existed; identity comparison is correct because PRESENT is the only
    // value ever stored in the backing map.
    return m.remove(o)==PRESENT;
}
/**
* Removes all of the elements from this set.
* The set will be empty after this call returns.
*/
public void clear() {
    // Delegates directly to the backing map.
    m.clear();
}
/**
* Adds all of the elements in the specified collection to this set.
*
* @param c collection containing elements to be added to this set
* @return {@code true} if this set changed as a result of the call
* @throws ClassCastException if the elements provided cannot be compared
* with the elements currently in the set
* @throws NullPointerException if the specified collection is null or
* if any element is null and this set uses natural ordering, or
* its comparator does not permit null elements
*/
public boolean addAll(Collection<? extends E> c) {
    // Use linear-time version if applicable: a bulk load into an empty TreeMap
    // from an already-sorted source can be built bottom-up in O(n) instead of
    // n individual O(log n) insertions.
    if (m.size()==0 && c.size() > 0 &&
        c instanceof SortedSet &&
        m instanceof TreeMap) {
        SortedSet<? extends E> set = (SortedSet<? extends E>) c;
        TreeMap<E,Object> map = (TreeMap<E, Object>) m;
        Comparator<?> cc = set.comparator();
        Comparator<? super E> mc = map.comparator();
        // Only valid when both sides sort identically (same comparator, or
        // both natural ordering with cc == mc == null).
        if (cc==mc || (cc != null && cc.equals(mc))) {
            map.addAllForTreeSet(set, PRESENT);
            return true;
        }
    }
    // Fall back to element-by-element insertion.
    return super.addAll(c);
}
/**
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException if {@code fromElement} or {@code toElement}
* is null and this set uses natural ordering, or its comparator
* does not permit null elements
* @throws IllegalArgumentException {@inheritDoc}
* @since 1.6
*/
public NavigableSet<E> subSet(E fromElement, boolean fromInclusive,
                              E toElement, boolean toInclusive) {
    // Backed view: wraps the corresponding range view of the backing map.
    return new TreeSet<>(m.subMap(fromElement, fromInclusive,
                                   toElement,   toInclusive));
}
/**
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException if {@code toElement} is null and
* this set uses natural ordering, or its comparator does
* not permit null elements
* @throws IllegalArgumentException {@inheritDoc}
* @since 1.6
*/
public NavigableSet<E> headSet(E toElement, boolean inclusive) {
    // Backed view over keys strictly (or weakly, if inclusive) below toElement.
    return new TreeSet<>(m.headMap(toElement, inclusive));
}
/**
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException if {@code fromElement} is null and
* this set uses natural ordering, or its comparator does
* not permit null elements
* @throws IllegalArgumentException {@inheritDoc}
* @since 1.6
*/
public NavigableSet<E> tailSet(E fromElement, boolean inclusive) {
    // Backed view over keys above fromElement (inclusive if requested).
    return new TreeSet<>(m.tailMap(fromElement, inclusive));
}
/**
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException if {@code fromElement} or
* {@code toElement} is null and this set uses natural ordering,
* or its comparator does not permit null elements
* @throws IllegalArgumentException {@inheritDoc}
*/
public SortedSet<E> subSet(E fromElement, E toElement) {
    // SortedSet convention: fromElement inclusive, toElement exclusive.
    return subSet(fromElement, true, toElement, false);
}
/**
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException if {@code toElement} is null
* and this set uses natural ordering, or its comparator does
* not permit null elements
* @throws IllegalArgumentException {@inheritDoc}
*/
public SortedSet<E> headSet(E toElement) {
    // SortedSet convention: toElement exclusive.
    return headSet(toElement, false);
}
/**
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException if {@code fromElement} is null
* and this set uses natural ordering, or its comparator does
* not permit null elements
* @throws IllegalArgumentException {@inheritDoc}
*/
public SortedSet<E> tailSet(E fromElement) {
    // SortedSet convention: fromElement inclusive.
    return tailSet(fromElement, true);
}
public Comparator<? super E> comparator() {
    // Null when the set uses the elements' natural ordering.
    return m.comparator();
}
/**
* @throws NoSuchElementException {@inheritDoc}
*/
public E first() {
    // Lowest element; firstKey() throws NoSuchElementException when empty.
    return m.firstKey();
}
/**
* @throws NoSuchElementException {@inheritDoc}
*/
public E last() {
    // Highest element; lastKey() throws NoSuchElementException when empty.
    return m.lastKey();
}
// NavigableSet API methods
/**
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException if the specified element is null
* and this set uses natural ordering, or its comparator
* does not permit null elements
* @since 1.6
*/
public E lower(E e) {
    // Greatest element strictly less than e, or null if there is none.
    return m.lowerKey(e);
}
/**
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException if the specified element is null
* and this set uses natural ordering, or its comparator
* does not permit null elements
* @since 1.6
*/
public E floor(E e) {
    // Greatest element less than or equal to e, or null if there is none.
    return m.floorKey(e);
}
/**
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException if the specified element is null
* and this set uses natural ordering, or its comparator
* does not permit null elements
* @since 1.6
*/
public E ceiling(E e) {
    // Least element greater than or equal to e, or null if there is none.
    return m.ceilingKey(e);
}
/**
* @throws ClassCastException {@inheritDoc}
* @throws NullPointerException if the specified element is null
* and this set uses natural ordering, or its comparator
* does not permit null elements
* @since 1.6
*/
public E higher(E e) {
    // Least element strictly greater than e, or null if there is none.
    return m.higherKey(e);
}
/**
* @since 1.6
*/
public E pollFirst() {
    // Atomically remove and return the lowest element; null when the set is empty.
    Map.Entry<E,?> e = m.pollFirstEntry();
    return (e == null) ? null : e.getKey();
}
/**
* @since 1.6
*/
public E pollLast() {
    // Atomically remove and return the highest element; null when the set is empty.
    Map.Entry<E,?> e = m.pollLastEntry();
    return (e == null) ? null : e.getKey();
}
/**
* Returns a shallow copy of this {@code TreeSet} instance. (The elements
* themselves are not cloned.)
*
* @return a shallow copy of this set
*/
@SuppressWarnings("unchecked")
public Object clone() {
    TreeSet<E> clone;
    try {
        clone = (TreeSet<E>) super.clone();
    } catch (CloneNotSupportedException e) {
        // Cannot happen: TreeSet implements Cloneable.
        throw new InternalError(e);
    }
    // Replace the shared backing map with an independent copy; the elements
    // themselves are not cloned (shallow copy).
    clone.m = new TreeMap<>(m);
    return clone;
}
/**
* Save the state of the {@code TreeSet} instance to a stream (that is,
* serialize it).
*
* @serialData Emits the comparator used to order this set, or
* {@code null} if it obeys its elements' natural ordering
* (Object), followed by the size of the set (the number of
* elements it contains) (int), followed by all of its
* elements (each an Object) in order (as determined by the
* set's Comparator, or by the elements' natural ordering if
* the set has no Comparator).
*/
private void writeObject(java.io.ObjectOutputStream s)
    throws java.io.IOException {
    // Write out any hidden stuff
    s.defaultWriteObject();
    // Write out Comparator (null when natural ordering is used)
    s.writeObject(m.comparator());
    // Write out size
    s.writeInt(m.size());
    // Write out all elements in the proper order, so readObject can rebuild
    // the tree in linear time without re-sorting.
    for (E e : m.keySet())
        s.writeObject(e);
}
/**
* Reconstitute the {@code TreeSet} instance from a stream (that is,
* deserialize it).
*/
private void readObject(java.io.ObjectInputStream s)
    throws java.io.IOException, ClassNotFoundException {
    // Read in any hidden stuff
    s.defaultReadObject();
    // Read in Comparator (null means natural ordering)
    @SuppressWarnings("unchecked")
    Comparator<? super E> c = (Comparator<? super E>) s.readObject();
    // Create backing TreeMap
    TreeMap<E,Object> tm = new TreeMap<>(c);
    m = tm;
    // Read in size, then bulk-load the elements. The stream was written in
    // sorted order (see writeObject), so the tree is rebuilt in linear time.
    int size = s.readInt();
    tm.readTreeSet(size, s, PRESENT);
}
/**
* Creates a <em><a href="Spliterator.html#binding">late-binding</a></em>
* and <em>fail-fast</em> {@link Spliterator} over the elements in this
* set.
*
* <p>The {@code Spliterator} reports {@link Spliterator#SIZED},
* {@link Spliterator#DISTINCT}, {@link Spliterator#SORTED}, and
* {@link Spliterator#ORDERED}. Overriding implementations should document
* the reporting of additional characteristic values.
*
* <p>The spliterator's comparator (see
* {@link java.util.Spliterator#getComparator()}) is {@code null} if
* the tree set's comparator (see {@link #comparator()}) is {@code null}.
* Otherwise, the spliterator's comparator is the same as or imposes the
* same total ordering as the tree set's comparator.
*
* @return a {@code Spliterator} over the elements in this set
* @since 1.8
*/
public Spliterator<E> spliterator() {
    // Delegate to TreeMap's key spliterator factory (late-binding, fail-fast).
    return TreeMap.keySpliteratorFor(m);
}
private static final long serialVersionUID = -2479143000061671589L;
}
| |
package my.dx.ball;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import my.dx.ball.DxtestActivity.viewsurface;
import android.R.color;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnTouchListener;
/**
 * Renders and advances a simple breakout-style game: one ball, one paddle
 * ("bar") and a list of bricks, and moves the bar from touch input.
 *
 * Fixes over the previous revision:
 * - the brick-collision loop no longer skips the element shifted into slot i
 *   when {@code collision()} removes a brick mid-iteration;
 * - integer squaring uses plain multiplication instead of a lossy
 *   {@code (int) Math.pow(..)} double round-trip.
 *
 * NOTE(review): class/field names do not follow Java conventions but are kept
 * unchanged because other classes in this package reference them directly.
 */
public class DrawObject implements OnTouchListener {
    /** All drawable objects of the current stage (index 0 = ball, 1 = bar). */
    public ArrayList<drawable> all;
    viewsurface v;
    ball b;
    bar br;
    int numOfbrick;
    // Edges of the brick hit during the previous frame; 0 means "no bounce pending".
    int posTop = 0;
    int posLeft = 0;
    int posRight = 0;
    // Top edge of the bar when a bar hit is pending; 0 means "none".
    int barPos = 0;
    int count = 0;
    ArrayList<brick> allBrick;
    List<Integer> list = new ArrayList<Integer>();
    // Cached canvas dimensions, captured on the first frame.
    int width, height;
    boolean firstTime = true;
    // Ball velocity components (pixels per frame).
    int dx, dy;
    float closestX;
    float closestY;
    boolean collison = false;     // brick hit pending (typo kept for compatibility)
    boolean BarCollison = false;  // bar hit pending (typo kept for compatibility)
    // Most recent touch coordinates.
    int moveX;
    int moveY;
    stage st;
    Canvas can;
    Thread trd;

    /**
     * @param v surface the game is drawn on; this object registers itself as
     *          the surface's touch listener.
     * @param t game-loop thread (retained but currently unused).
     */
    public DrawObject(viewsurface v, Thread t) {
        this.v = v;
        trd = t;
        v.setOnTouchListener(this);
    }

    /**
     * Draws one frame (ball, bar, bricks) and then advances the simulation.
     * On the first call the stage data (objects, speed, dimensions) is cached.
     */
    public void Ondraw(Canvas canvas, stage s) {
        if (firstTime) {
            firstTime = false;
            width = canvas.getWidth();
            height = canvas.getHeight();
            all = s.AllObject;
            // NOTE(review): assumes AllObject is ordered [ball, bar, ...] --
            // confirm against the stage class.
            b = (ball) all.get(0);
            br = (bar) all.get(1);
            allBrick = s.allBrick;
            dx = dy = s.ballSpeed;
            this.numOfbrick = s.NumOfBrick;
            this.st = s;
            this.can = canvas;
        }
        // Draw ball, bar and every remaining brick.
        b.draw(canvas);
        br.draw(canvas);
        for (int i = 0; i < allBrick.size(); i++) {
            allBrick.get(i).draw(canvas);
        }
        moverandomly();
    }

    /**
     * Advances the ball by (dx, dy): first resolves any bounce latched by the
     * previous frame's collision (brick edges, then bar), otherwise reflects
     * off the screen borders; then moves the ball and re-runs collision
     * detection for the next frame.
     */
    public void moverandomly() {
        if (collison) {
            // Bounce off the edges of the brick hit last frame; clearing the
            // latched edge re-arms normal border handling.
            if (b.y < posTop + b.r) {
                dy = -dy;
                posTop = 0;
                collison = false;
            }
            if (b.x < posLeft + 15) {
                dx = -dx;
                posLeft = 0;
                collison = false;
            }
            if (b.x + b.r > posRight) {
                dx = -dx;
                posRight = 0;
                collison = false;
            }
        } else if (BarCollison) {
            if (b.y + b.r >= barPos) {
                dy = -dy;
                barPos = 0;
                BarCollison = false;
            }
        } else {
            // Screen borders: left/right/top reflect; crossing the bottom
            // stops the ball entirely (ball lost).
            if (b.x + b.r > width) {
                dx = -dx;
            }
            if (b.x < b.r) {
                dx = -dx;
            }
            if (b.y + b.r > height) {
                dy = 0;
                dx = 0;
            }
            if (b.y < b.r) {
                dy = -dy;
            }
        }
        b.x += dx;
        b.y += dy;
        // Brick/ball collision pass. collision() may remove the brick it is
        // given; when that happens the following element shifts into slot i,
        // so re-test the same index instead of silently skipping it (the old
        // loop missed that brick for one frame).
        for (int i = 0; i < allBrick.size(); i++) {
            int sizeBefore = allBrick.size();
            collision(b, allBrick.get(i), i);
            if (allBrick.size() < sizeBefore) {
                i--;
            }
        }
        // Ball and bar collision.
        ColWithBar(b, br);
    }

    /**
     * Circle-vs-rectangle test between the ball and one brick. On a hit the
     * brick's edges are latched into posTop/posLeft/posRight for next frame's
     * bounce, the brick is darkened, and it is removed on every second hit.
     * When the last brick is removed, level 2 is loaded and the cached stage
     * state is re-initialised on the next draw.
     */
    public void collision(ball bl, brick bk, int brkNo) {
        // Closest point of the brick rectangle to the ball centre.
        int closeX = bl.x;
        int closeY = bl.y;
        if (bl.x < bk.brk.left) {
            closeX = bk.brk.left;
        } else if (bl.x > bk.brk.right) {
            closeX = bk.brk.right;
        }
        if (bl.y > bk.brk.bottom) {
            closeY = bk.brk.bottom;
        } else if (bl.y < bk.brk.top) {
            closeY = bk.brk.top;
        }
        // Plain integer multiplication: exact for screen coordinates and
        // avoids the double round-trip of (int) Math.pow(..., 2).
        int distx = (bl.x - closeX) * (bl.x - closeX);
        int disty = (bl.y - closeY) * (bl.y - closeY);
        int distsqr = distx + disty;
        if (Math.sqrt(distsqr) < bl.r) {
            collison = true;
            posTop = allBrick.get(brkNo).brk.bottom;
            posLeft = allBrick.get(brkNo).brk.left;
            posRight = allBrick.get(brkNo).brk.right;
            allBrick.get(brkNo).state += 1;
            allBrick.get(brkNo).paint.setColor(Color.BLACK);
            Log.i("MyApp", "state" + allBrick.get(brkNo).state);
            // A brick survives one hit (darkened) and is removed on the second.
            if (allBrick.get(brkNo).state % 2 == 0) {
                allBrick.remove(brkNo);
            }
            if (allBrick.size() == 0) {
                // Last brick cleared: build the next stage and force the
                // first-frame initialisation to run again on the next draw.
                stage stg = new stage(v.context);
                firstTime = true;
                try {
                    stg.setLevel(2);
                    Ondraw(can, stg);
                } catch (IOException e) {
                    // Best-effort level load; keep the old behaviour of
                    // logging the failure rather than crashing the game loop.
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Circle-vs-rectangle test between the ball and the bar; on a hit the
     * bar's top edge is latched into barPos for next frame's bounce.
     */
    public void ColWithBar(ball bl, bar bar) {
        int closeX = bl.x;
        int closeY = bl.y;
        if (bl.x < bar.br.left) {
            closeX = bar.br.left;
        } else if (bl.x > bar.br.right) {
            closeX = bar.br.right;
        }
        if (bl.y > bar.br.bottom) {
            closeY = bar.br.bottom;
        } else if (bl.y < bar.br.top) {
            closeY = bar.br.top;
        }
        int distx = (bl.x - closeX) * (bl.x - closeX);
        int disty = (bl.y - closeY) * (bl.y - closeY);
        int distsqr = distx + disty;
        if (Math.sqrt(distsqr) < bl.r) {
            BarCollison = true;
            barPos = bar.br.top;
        }
    }

    /**
     * Tracks the touch position and, while dragging in the bar's strip near
     * the bottom of the screen, moves the bar to follow the finger.
     */
    public boolean onTouch(View v, MotionEvent event) {
        moveX = (int) event.getX();
        moveY = (int) event.getY();
        switch (event.getAction()) {
            case MotionEvent.ACTION_UP:
                break;
            case MotionEvent.ACTION_MOVE:
                // NOTE(review): 780 is a hard-coded screen coordinate --
                // should be derived from 'height' to support other displays.
                if (moveY > 780) {
                    movebar();
                }
                break;
            case MotionEvent.ACTION_DOWN:
                break;
        }
        return true;
    }

    /**
     * Repositions the bar (width 80, rows 780..800) under the last touch X.
     * NOTE(review): the 400/780/800 bounds are hard-coded for one screen
     * size -- should be derived from 'width'/'height'.
     */
    public void movebar() {
        if (moveX > 0 && moveX < 400) {
            br.br.left = moveX;
            br.br.top = 780;
            br.br.right = moveX + 80;
            br.br.bottom = 800;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.platform.cache.affinity;
import org.apache.ignite.*;
import org.apache.ignite.cache.affinity.*;
import org.apache.ignite.cluster.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.managers.discovery.*;
import org.apache.ignite.internal.portable.*;
import org.apache.ignite.internal.processors.platform.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.jetbrains.annotations.*;
import java.util.*;
/**
 * Native cache wrapper implementation: exposes {@link Affinity} operations to
 * the platform (non-Java) side via numeric operation codes. Each OP_* constant
 * must stay in sync with the corresponding platform-side constant, and the
 * order of reads from the {@code reader} must mirror the order the platform
 * side wrote the arguments.
 */
@SuppressWarnings({"unchecked", "UnusedDeclaration", "TryFinallyCanBeTryWithResources"})
public class PlatformAffinity extends PlatformAbstractTarget {
    /** Op code: resolve the affinity key for a cache key. */
    public static final int OP_AFFINITY_KEY = 1;
    /** Op code: all partitions of a node. */
    public static final int OP_ALL_PARTITIONS = 2;
    /** Op code: backup partitions of a node. */
    public static final int OP_BACKUP_PARTITIONS = 3;
    /** Op code: is the node a backup for a key. */
    public static final int OP_IS_BACKUP = 4;
    /** Op code: is the node primary for a key. */
    public static final int OP_IS_PRIMARY = 5;
    /** Op code: is the node primary or backup for a key. */
    public static final int OP_IS_PRIMARY_OR_BACKUP = 6;
    /** Op code: map a key to its primary node. */
    public static final int OP_MAP_KEY_TO_NODE = 7;
    /** Op code: map a key to primary and backup nodes. */
    public static final int OP_MAP_KEY_TO_PRIMARY_AND_BACKUPS = 8;
    /** Op code: map a collection of keys to nodes. */
    public static final int OP_MAP_KEYS_TO_NODES = 9;
    /** Op code: map a partition to its primary node. */
    public static final int OP_MAP_PARTITION_TO_NODE = 10;
    /** Op code: map a partition to primary and backup nodes. */
    public static final int OP_MAP_PARTITION_TO_PRIMARY_AND_BACKUPS = 11;
    /** Op code: map a collection of partitions to nodes. */
    public static final int OP_MAP_PARTITIONS_TO_NODES = 12;
    /** Op code: partition for a key. */
    public static final int OP_PARTITION = 13;
    /** Op code: primary partitions of a node. */
    public static final int OP_PRIMARY_PARTITIONS = 14;
    /** Null-safe projection of a cluster node to its id (currently unused here). */
    private static final C1<ClusterNode, UUID> TO_NODE_ID = new C1<ClusterNode, UUID>() {
        @Nullable @Override public UUID apply(ClusterNode node) {
            return node != null ? node.id() : null;
        }
    };
    /** Underlying cache affinity. */
    private final Affinity<Object> aff;
    /** Discovery manager, used to resolve node UUIDs to live cluster nodes. */
    private final GridDiscoveryManager discovery;
    /**
     * Constructor.
     *
     * @param platformCtx Context.
     * @param igniteCtx Ignite context.
     * @param name Cache name.
     * @throws IgniteCheckedException If no cache with the given name exists.
     */
    public PlatformAffinity(PlatformContext platformCtx, GridKernalContext igniteCtx, @Nullable String name)
        throws IgniteCheckedException {
        super(platformCtx);
        this.aff = igniteCtx.grid().affinity(name);
        if (aff == null)
            throw new IgniteCheckedException("Cache with the given name doesn't exist: " + name);
        discovery = igniteCtx.discovery();
    }
    /** {@inheritDoc}
     * Handles ops that take arguments from the stream and return an int.
     * For the IS_* ops an unknown node id yields FALSE rather than an error. */
    @Override protected int processInOp(int type, PortableRawReaderEx reader) throws IgniteCheckedException {
        switch (type) {
            case OP_PARTITION:
                return aff.partition(reader.readObjectDetached());
            case OP_IS_PRIMARY: {
                // Read order (nodeId, then key) must match the platform-side writes.
                UUID nodeId = reader.readUuid();
                Object key = reader.readObjectDetached();
                ClusterNode node = discovery.node(nodeId);
                if (node == null)
                    return FALSE;
                return aff.isPrimary(node, key) ? TRUE : FALSE;
            }
            case OP_IS_BACKUP: {
                UUID nodeId = reader.readUuid();
                Object key = reader.readObjectDetached();
                ClusterNode node = discovery.node(nodeId);
                if (node == null)
                    return FALSE;
                return aff.isBackup(node, key) ? TRUE : FALSE;
            }
            case OP_IS_PRIMARY_OR_BACKUP: {
                UUID nodeId = reader.readUuid();
                Object key = reader.readObjectDetached();
                ClusterNode node = discovery.node(nodeId);
                if (node == null)
                    return FALSE;
                return aff.isPrimaryOrBackup(node, key) ? TRUE : FALSE;
            }
            default:
                return throwUnsupported(type);
        }
    }
    /** {@inheritDoc}
     * Handles ops that both consume arguments and write a result back.
     * Partition-array ops write an empty array for an unknown node id. */
    @SuppressWarnings({"IfMayBeConditional", "ConstantConditions"})
    @Override protected void processInOutOp(int type, PortableRawReaderEx reader, PortableRawWriterEx writer,
        Object arg) throws IgniteCheckedException {
        switch (type) {
            case OP_PRIMARY_PARTITIONS: {
                UUID nodeId = reader.readObject();
                ClusterNode node = discovery.node(nodeId);
                int[] parts = node != null ? aff.primaryPartitions(node) : U.EMPTY_INTS;
                writer.writeIntArray(parts);
                break;
            }
            case OP_BACKUP_PARTITIONS: {
                UUID nodeId = reader.readObject();
                ClusterNode node = discovery.node(nodeId);
                int[] parts = node != null ? aff.backupPartitions(node) : U.EMPTY_INTS;
                writer.writeIntArray(parts);
                break;
            }
            case OP_ALL_PARTITIONS: {
                UUID nodeId = reader.readObject();
                ClusterNode node = discovery.node(nodeId);
                int[] parts = node != null ? aff.allPartitions(node) : U.EMPTY_INTS;
                writer.writeIntArray(parts);
                break;
            }
            case OP_AFFINITY_KEY: {
                Object key = reader.readObjectDetached();
                writer.writeObject(aff.affinityKey(key));
                break;
            }
            case OP_MAP_KEY_TO_NODE: {
                Object key = reader.readObjectDetached();
                ClusterNode node = aff.mapKeyToNode(key);
                platformCtx.writeNode(writer, node);
                break;
            }
            case OP_MAP_PARTITION_TO_NODE: {
                int part = reader.readObject();
                ClusterNode node = aff.mapPartitionToNode(part);
                platformCtx.writeNode(writer, node);
                break;
            }
            case OP_MAP_KEY_TO_PRIMARY_AND_BACKUPS: {
                Object key = reader.readObjectDetached();
                platformCtx.writeNodes(writer, aff.mapKeyToPrimaryAndBackups(key));
                break;
            }
            case OP_MAP_PARTITION_TO_PRIMARY_AND_BACKUPS: {
                int part = reader.readObject();
                platformCtx.writeNodes(writer, aff.mapPartitionToPrimaryAndBackups(part));
                break;
            }
            case OP_MAP_KEYS_TO_NODES: {
                Collection<Object> keys = reader.readCollection();
                Map<ClusterNode, Collection<Object>> map = aff.mapKeysToNodes(keys);
                // Wire format: entry count, then (node id, keys) per entry.
                // addNode registers the node so the platform side can resolve the id.
                writer.writeInt(map.size());
                for (Map.Entry<ClusterNode, Collection<Object>> e : map.entrySet()) {
                    platformCtx.addNode(e.getKey());
                    writer.writeUuid(e.getKey().id());
                    writer.writeObject(e.getValue());
                }
                break;
            }
            case OP_MAP_PARTITIONS_TO_NODES: {
                Collection<Integer> parts = reader.readCollection();
                Map<Integer, ClusterNode> map = aff.mapPartitionsToNodes(parts);
                // Wire format: entry count, then (partition, node id) per entry.
                writer.writeInt(map.size());
                for (Map.Entry<Integer, ClusterNode> e : map.entrySet()) {
                    platformCtx.addNode(e.getValue());
                    writer.writeInt(e.getKey());
                    writer.writeUuid(e.getValue().id());
                }
                break;
            }
            default:
                throwUnsupported(type);
        }
    }
    /**
     * @return Gets number of partitions in cache.
     */
    public int partitions() {
        return aff.partitions();
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.auth;
import static com.amazonaws.util.StringUtils.UTF8;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.net.URI;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import com.amazonaws.AmazonClientException;
import com.amazonaws.ReadLimitInfo;
import com.amazonaws.SDKGlobalTime;
import com.amazonaws.SignableRequest;
import com.amazonaws.internal.SdkDigestInputStream;
import com.amazonaws.util.Base64;
import com.amazonaws.util.BinaryUtils;
import com.amazonaws.util.SdkHttpUtils;
/**
* Abstract base class for AWS signing protocol implementations. Provides
* utilities commonly needed by signing protocols such as computing
* canonicalized host names, query string parameters, etc.
* <p>
* Not intended to be sub-classed by developers.
*/
public abstract class AbstractAWSSigner implements Signer {
public static final String EMPTY_STRING_SHA256_HEX;
static {
EMPTY_STRING_SHA256_HEX = BinaryUtils.toHex(doHash(""));
}
/**
* Computes an RFC 2104-compliant HMAC signature and returns the result as a
* Base64 encoded string.
*/
protected String signAndBase64Encode(String data, String key,
SigningAlgorithm algorithm) throws AmazonClientException {
return signAndBase64Encode(data.getBytes(UTF8), key, algorithm);
}
/**
* Computes an RFC 2104-compliant HMAC signature for an array of bytes and
* returns the result as a Base64 encoded string.
*/
protected String signAndBase64Encode(byte[] data, String key,
SigningAlgorithm algorithm) throws AmazonClientException {
try {
byte[] signature = sign(data, key.getBytes(UTF8), algorithm);
return Base64.encodeAsString(signature);
} catch (Exception e) {
throw new AmazonClientException(
"Unable to calculate a request signature: "
+ e.getMessage(), e);
}
}
public byte[] sign(String stringData, byte[] key,
SigningAlgorithm algorithm) throws AmazonClientException {
try {
byte[] data = stringData.getBytes(UTF8);
return sign(data, key, algorithm);
} catch (Exception e) {
throw new AmazonClientException(
"Unable to calculate a request signature: "
+ e.getMessage(), e);
}
}
public byte[] signWithMac(String stringData, Mac mac) {
try {
return mac.doFinal(stringData.getBytes(UTF8));
} catch (Exception e) {
throw new AmazonClientException(
"Unable to calculate a request signature: "
+ e.getMessage(), e);
}
}
protected byte[] sign(byte[] data, byte[] key,
SigningAlgorithm algorithm) throws AmazonClientException {
try {
Mac mac = Mac.getInstance(algorithm.toString());
mac.init(new SecretKeySpec(key, algorithm.toString()));
return mac.doFinal(data);
} catch (Exception e) {
throw new AmazonClientException(
"Unable to calculate a request signature: "
+ e.getMessage(), e);
}
}
/**
* Hashes the string contents (assumed to be UTF-8) using the SHA-256
* algorithm.
*
* @param text
* The string to hash.
*
* @return The hashed bytes from the specified string.
*
* @throws AmazonClientException
* If the hash cannot be computed.
*/
public byte[] hash(String text) throws AmazonClientException {
return AbstractAWSSigner.doHash(text);
}
private static byte[] doHash(String text) throws AmazonClientException {
try {
MessageDigest md = MessageDigest.getInstance("SHA-256");
md.update(text.getBytes(UTF8));
return md.digest();
} catch (Exception e) {
throw new AmazonClientException(
"Unable to compute hash while signing request: "
+ e.getMessage(), e);
}
}
protected byte[] hash(InputStream input) throws AmazonClientException {
try {
MessageDigest md = MessageDigest.getInstance("SHA-256");
@SuppressWarnings("resource")
DigestInputStream digestInputStream = new SdkDigestInputStream(
input, md);
byte[] buffer = new byte[1024];
while (digestInputStream.read(buffer) > -1)
;
return digestInputStream.getMessageDigest().digest();
} catch (Exception e) {
throw new AmazonClientException(
"Unable to compute hash while signing request: "
+ e.getMessage(), e);
}
}
/**
* Hashes the binary data using the SHA-256 algorithm.
*
* @param data
* The binary data to hash.
*
* @return The hashed bytes from the specified data.
*
* @throws AmazonClientException
* If the hash cannot be computed.
*/
public byte[] hash(byte[] data) throws AmazonClientException {
try {
MessageDigest md = MessageDigest.getInstance("SHA-256");
md.update(data);
return md.digest();
} catch (Exception e) {
throw new AmazonClientException(
"Unable to compute hash while signing request: "
+ e.getMessage(), e);
}
}
/**
* Examines the specified query string parameters and returns a
* canonicalized form.
* <p>
* The canonicalized query string is formed by first sorting all the query
* string parameters, then URI encoding both the key and value and then
* joining them, in order, separating key value pairs with an '&'.
*
* @param parameters
* The query string parameters to be canonicalized.
*
* @return A canonicalized form for the specified query string parameters.
*/
protected String getCanonicalizedQueryString(Map<String, List<String>> parameters) {
final SortedMap<String, List<String>> sorted = new TreeMap<String, List<String>>();
/**
* Signing protocol expects the param values also to be sorted after url
* encoding in addition to sorted parameter names.
*/
for (Map.Entry<String, List<String>> entry : parameters.entrySet()) {
final String encodedParamName = SdkHttpUtils.urlEncode(
entry.getKey(), false);
final List<String> paramValues = entry.getValue();
final List<String> encodedValues = new ArrayList<String>(
paramValues.size());
for (String value : paramValues) {
encodedValues.add(SdkHttpUtils.urlEncode(value, false));
}
Collections.sort(encodedValues);
sorted.put(encodedParamName, encodedValues);
}
final StringBuilder result = new StringBuilder();
for(Map.Entry<String, List<String>> entry : sorted.entrySet()) {
for(String value : entry.getValue()) {
if (result.length() > 0) {
result.append("&");
}
result.append(entry.getKey())
.append("=")
.append(value);
}
}
return result.toString();
}
protected String getCanonicalizedQueryString(SignableRequest<?> request) {
/*
* If we're using POST and we don't have any request payload content,
* then any request query parameters will be sent as the payload, and
* not in the actual query string.
*/
if (SdkHttpUtils.usePayloadForQueryParameters(request))
return "";
return this.getCanonicalizedQueryString(request.getParameters());
}
/**
* Returns the request's payload as binary data.
*
* @param request
* The request
* @return The data from the request's payload, as binary data.
*/
protected byte[] getBinaryRequestPayload(SignableRequest<?> request) {
if (SdkHttpUtils.usePayloadForQueryParameters(request)) {
String encodedParameters = SdkHttpUtils.encodeParameters(request);
if (encodedParameters == null)
return new byte[0];
return encodedParameters.getBytes(UTF8);
}
return getBinaryRequestPayloadWithoutQueryParams(request);
}
/**
* Returns the request's payload as a String.
*
* @param request
* The request
* @return The data from the request's payload, as a string.
*/
protected String getRequestPayload(SignableRequest<?> request) {
return newString(getBinaryRequestPayload(request));
}
/**
* Returns the request's payload contents as a String, without processing
* any query string params (i.e. no form encoding for query params).
*
* @param request
* The request
* @return the request's payload contents as a String, not including any
* form encoding of query string params.
*/
protected String getRequestPayloadWithoutQueryParams(SignableRequest<?> request) {
return newString(getBinaryRequestPayloadWithoutQueryParams(request));
}
/**
* Returns the request's payload contents as binary data, without processing
* any query string params (i.e. no form encoding for query params).
*
* @param request
* The request
* @return The request's payload contents as binary data, not including any
* form encoding of query string params.
*/
protected byte[] getBinaryRequestPayloadWithoutQueryParams(SignableRequest<?> request) {
InputStream content = getBinaryRequestPayloadStreamWithoutQueryParams(request);
try {
ReadLimitInfo info = request.getReadLimitInfo();
content.mark(info == null ? -1 : info.getReadLimit());
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
byte[] buffer = new byte[1024 * 5];
while (true) {
int bytesRead = content.read(buffer);
if (bytesRead == -1) break;
byteArrayOutputStream.write(buffer, 0, bytesRead);
}
byteArrayOutputStream.close();
content.reset();
return byteArrayOutputStream.toByteArray();
} catch (Exception e) {
throw new AmazonClientException("Unable to read request payload to sign request: " + e.getMessage(), e);
}
}
protected InputStream getBinaryRequestPayloadStream(SignableRequest<?> request) {
if (SdkHttpUtils.usePayloadForQueryParameters(request)) {
String encodedParameters = SdkHttpUtils.encodeParameters(request);
if (encodedParameters == null)
return new ByteArrayInputStream(new byte[0]);
return new ByteArrayInputStream(
encodedParameters.getBytes(UTF8));
}
return getBinaryRequestPayloadStreamWithoutQueryParams(request);
}
protected InputStream getBinaryRequestPayloadStreamWithoutQueryParams(SignableRequest<?> request) {
try {
InputStream is = request.getContentUnwrapped();
if (is == null)
return new ByteArrayInputStream(new byte[0]);
if (!is.markSupported())
throw new AmazonClientException("Unable to read request payload to sign request.");
return is;
} catch (AmazonClientException e) {
throw e;
} catch (Exception e) {
throw new AmazonClientException("Unable to read request payload to sign request: " + e.getMessage(), e);
}
}
protected String getCanonicalizedResourcePath(String resourcePath) {
return getCanonicalizedResourcePath(resourcePath, true);
}
protected String getCanonicalizedResourcePath(String resourcePath, boolean urlEncode) {
if (resourcePath == null || resourcePath.isEmpty()) {
return "/";
} else {
String value = urlEncode ? SdkHttpUtils.urlEncode(resourcePath, true) : resourcePath;
if (value.startsWith("/")) {
return value;
} else {
return "/".concat(value);
}
}
}
protected String getCanonicalizedEndpoint(URI endpoint) {
String endpointForStringToSign = endpoint.getHost().toLowerCase();
/*
* Apache HttpClient will omit the port in the Host header for default
* port values (i.e. 80 for HTTP and 443 for HTTPS) even if we
* explicitly specify it, so we need to be careful that we use the same
* value here when we calculate the string to sign and in the Host
* header we send in the HTTP request.
*/
if (SdkHttpUtils.isUsingNonDefaultPort(endpoint)) {
endpointForStringToSign += ":" + endpoint.getPort();
}
return endpointForStringToSign;
}
/**
 * Reads the access key id, secret key and (when present) session token from
 * the specified credentials — synchronizing on the credentials object itself
 * while reading — and returns a copy with surrounding whitespace trimmed.
 * <p>
 * Returns either a {@link BasicSessionCredentials} or a
 * {@link BasicAWSCredentials} object, depending on the input type.
 *
 * @param credentials the credentials to sanitize
 * @return a new credentials object with the sanitized credentials
 */
protected AWSCredentials sanitizeCredentials(AWSCredentials credentials) {
    final String rawAccessKeyId;
    final String rawSecretKey;
    final String rawToken;
    // Snapshot all fields under one lock so they stay mutually consistent.
    synchronized (credentials) {
        rawAccessKeyId = credentials.getAWSAccessKeyId();
        rawSecretKey = credentials.getAWSSecretKey();
        rawToken = (credentials instanceof AWSSessionCredentials)
                ? ((AWSSessionCredentials) credentials).getSessionToken()
                : null;
    }
    String accessKeyId = (rawAccessKeyId == null) ? null : rawAccessKeyId.trim();
    String secretKey = (rawSecretKey == null) ? null : rawSecretKey.trim();
    String token = (rawToken == null) ? null : rawToken.trim();
    if (credentials instanceof AWSSessionCredentials) {
        return new BasicSessionCredentials(accessKeyId, secretKey, token);
    }
    return new BasicAWSCredentials(accessKeyId, secretKey);
}
/**
 * Safely converts a UTF-8 encoded byte array into a String.
 *
 * @param bytes UTF-8 encoded binary character data.
 *
 * @return The converted String object.
 */
protected String newString(byte[] bytes) {
// UTF8 is a Charset constant declared elsewhere on this class (outside this
// view) — presumably StandardCharsets.UTF_8 or equivalent; verify there.
return new String(bytes, UTF8);
}
/**
 * Returns the current time minus the given offset in seconds.
 * The intent is to adjust the current time in the running JVM to the
 * corresponding wall clock time at AWS for request signing purposes.
 *
 * @param offsetInSeconds
 *            offset in seconds
 * @return the adjusted signing date
 */
protected Date getSignatureDate(int offsetInSeconds) {
    // Multiply in long arithmetic: the original int expression
    // offsetInSeconds * 1000 overflows for offsets beyond ~24.8 days.
    return new Date(System.currentTimeMillis() - offsetInSeconds * 1000L);
}
/**
 * Returns the time offset in seconds, preferring the SDK-global offset when
 * one has been set; otherwise falls back to the request's own offset.
 */
@Deprecated
protected int getTimeOffset(SignableRequest<?> request) {
    final int globalOffset = SDKGlobalTime.getGlobalTimeOffset();
    if (globalOffset != 0) {
        return globalOffset;
    }
    return request.getTimeOffset();
}
/**
 * Adds session credentials to the request given. Implemented by concrete
 * signers; NOTE(review): how the session token is attached (header vs.
 * parameter) is signer-specific — confirm in the subclass.
 *
 * @param request
 *            The request to add session credentials information to
 * @param credentials
 *            The session credentials to add to the request
 */
protected abstract void addSessionCredentials(SignableRequest<?> request,
AWSSessionCredentials credentials);
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cli;
import static org.junit.Assert.assertEquals;
import com.facebook.buck.apple.AppleBinaryBuilder;
import com.facebook.buck.apple.AppleBundleBuilder;
import com.facebook.buck.apple.AppleBundleExtension;
import com.facebook.buck.apple.AppleLibraryBuilder;
import com.facebook.buck.apple.AppleTestBuilder;
import com.facebook.buck.apple.ProjectGenerator;
import com.facebook.buck.apple.ProjectGeneratorTestUtils;
import com.facebook.buck.apple.XcodeWorkspaceConfigBuilder;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.Either;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetGraphAndTargets;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.rules.TestSourcePath;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
/**
 * Tests for Xcode workspace/project generation by the {@code buck project}
 * command: verifies exactly which target-graph nodes are included for the
 * full graph and for workspace slices, with and without tests, and which
 * xctest targets end up in the generated projects.
 */
public class ProjectCommandXcodeTest {
private TargetNode<?> barLibNode;
private TargetNode<?> fooLibNode;
private TargetNode<?> fooBinBinaryNode;
private TargetNode<?> fooBinNode;
private TargetNode<?> bazLibNode;
private TargetNode<?> bazTestNode;
private TargetNode<?> fooTestNode;
private TargetNode<?> fooBinTestNode;
private TargetNode<?> quxBinNode;
private TargetNode<?> workspaceNode;
private TargetNode<?> workspaceExtraTestNode;
private TargetNode<?> smallWorkspaceNode;
TargetGraph targetGraph;
@Before
public void buildGraph() {
// Create the following dep tree:
//
// FooBin -has-test-> FooBinTest
// |
// V
// FooLib -has-test-> FooLibTest
// | |
// V V
// BarLib BazLib -has-test-> BazLibTest
// ^
// |
// QuxBin
//
// All test targets are attached to the target under test via the "tests"
// attribute (see the setTests(...) calls below).
BuildTarget bazTestTarget = BuildTargetFactory.newInstance("//baz:xctest");
BuildTarget fooBinTestTarget = BuildTargetFactory.newInstance("//foo:bin-xctest");
BuildTarget barLibTarget = BuildTargetFactory.newInstance("//bar:lib");
barLibNode = AppleLibraryBuilder
.createBuilder(barLibTarget)
.build();
BuildTarget bazLibTarget = BuildTargetFactory.newInstance("//baz:lib");
bazLibNode = AppleLibraryBuilder
.createBuilder(bazLibTarget)
.setTests(Optional.of(ImmutableSortedSet.of(bazTestTarget)))
.build();
BuildTarget fooTestTarget = BuildTargetFactory.newInstance("//foo:lib-xctest");
fooTestNode = AppleTestBuilder
.createBuilder(fooTestTarget)
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setDeps(Optional.of(ImmutableSortedSet.of(bazLibTarget)))
.setInfoPlist(new TestSourcePath("Info.plist"))
.build();
BuildTarget fooLibTarget = BuildTargetFactory.newInstance("//foo:lib");
fooLibNode = AppleLibraryBuilder
.createBuilder(fooLibTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(barLibTarget)))
.setTests(Optional.of(ImmutableSortedSet.of(fooTestTarget)))
.build();
BuildTarget fooBinBinaryTarget = BuildTargetFactory.newInstance("//foo:binbinary");
fooBinBinaryNode = AppleBinaryBuilder
.createBuilder(fooBinBinaryTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(fooLibTarget)))
.build();
BuildTarget fooBinTarget = BuildTargetFactory.newInstance("//foo:bin");
fooBinNode = AppleBundleBuilder
.createBuilder(fooBinTarget)
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.APP))
.setBinary(fooBinBinaryTarget)
.setTests(Optional.of(ImmutableSortedSet.of(fooBinTestTarget)))
.setInfoPlist(new TestSourcePath("Info.plist"))
.build();
bazTestNode = AppleTestBuilder
.createBuilder(bazTestTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(bazLibTarget)))
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setInfoPlist(new TestSourcePath("Info.plist"))
.build();
fooBinTestNode = AppleTestBuilder
.createBuilder(fooBinTestTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(fooBinTarget)))
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setInfoPlist(new TestSourcePath("Info.plist"))
.build();
BuildTarget quxBinTarget = BuildTargetFactory.newInstance("//qux:bin");
quxBinNode = AppleBinaryBuilder
.createBuilder(quxBinTarget)
.setDeps(Optional.of(ImmutableSortedSet.of(barLibTarget)))
.build();
// Extra test attached directly to the workspace (no src target of its own).
BuildTarget workspaceExtraTestTarget = BuildTargetFactory.newInstance("//foo:extra-xctest");
workspaceExtraTestNode = AppleTestBuilder
.createBuilder(workspaceExtraTestTarget)
.setExtension(Either.<AppleBundleExtension, String>ofLeft(AppleBundleExtension.XCTEST))
.setInfoPlist(new TestSourcePath("Info.plist"))
.build();
BuildTarget workspaceTarget = BuildTargetFactory.newInstance("//foo:workspace");
workspaceNode = XcodeWorkspaceConfigBuilder
.createBuilder(workspaceTarget)
.setWorkspaceName(Optional.of("workspace"))
.setSrcTarget(Optional.of(fooBinTarget))
.setExtraTests(Optional.of(ImmutableSortedSet.of(workspaceExtraTestTarget)))
.build();
BuildTarget smallWorkspaceTarget = BuildTargetFactory.newInstance("//baz:small-workspace");
smallWorkspaceNode = XcodeWorkspaceConfigBuilder
.createBuilder(smallWorkspaceTarget)
.setWorkspaceName(Optional.of("small-workspace"))
.setSrcTarget(Optional.of(bazLibTarget))
.build();
targetGraph = TargetGraphFactory.newInstance(
barLibNode,
fooLibNode,
fooBinBinaryNode,
fooBinNode,
bazLibNode,
bazTestNode,
fooTestNode,
fooBinTestNode,
quxBinNode,
workspaceExtraTestNode,
workspaceNode,
smallWorkspaceNode);
}
// No build targets passed in, tests disabled: expect both workspaces and
// their transitive deps, plus the workspace's extra test, but no has-test
// nodes (bazTestNode, fooTestNode, fooBinTestNode) and nothing that only
// quxBin depends on.
@Test
public void testCreateTargetGraphWithoutTests() {
TargetGraphAndTargets targetGraphAndTargets = ProjectCommandTests.createTargetGraph(
targetGraph,
ProjectCommand.Ide.XCODE,
ImmutableSet.<BuildTarget>of(),
/* withTests = */ false,
/* withDependenciesTests = */ false);
assertEquals(
ImmutableSortedSet.<TargetNode<?>>of(
workspaceNode,
fooBinNode,
fooBinBinaryNode,
fooLibNode,
barLibNode,
smallWorkspaceNode,
bazLibNode,
workspaceExtraTestNode),
ImmutableSortedSet.copyOf(
targetGraphAndTargets.getTargetGraph().getNodes()));
}
// Tests enabled: the has-test nodes of every included target join the graph.
@Test
public void testCreateTargetGraphWithTests() {
TargetGraphAndTargets targetGraphAndTargets = ProjectCommandTests.createTargetGraph(
targetGraph,
ProjectCommand.Ide.XCODE,
ImmutableSet.<BuildTarget>of(),
/* withTests = */ true,
/* withDependenciesTests */ true);
assertEquals(
ImmutableSortedSet.<TargetNode<?>>of(
workspaceNode,
fooBinNode,
fooBinBinaryNode,
fooLibNode,
fooBinTestNode,
fooTestNode,
barLibNode,
smallWorkspaceNode,
bazLibNode,
bazTestNode,
workspaceExtraTestNode),
ImmutableSortedSet.copyOf(
targetGraphAndTargets.getTargetGraph().getNodes()));
}
// Slicing to //foo:workspace excludes the small workspace subtree entirely.
@Test
public void testCreateTargetGraphForSliceWithoutTests() {
TargetGraphAndTargets targetGraphAndTargets = ProjectCommandTests.createTargetGraph(
targetGraph,
ProjectCommand.Ide.XCODE,
ImmutableSet.of(workspaceNode.getBuildTarget()),
/* withTests = */ false,
/* withDependenciesTests */ false);
assertEquals(
ImmutableSortedSet.<TargetNode<?>>of(
workspaceNode,
fooBinNode,
fooBinBinaryNode,
fooLibNode,
barLibNode,
workspaceExtraTestNode),
ImmutableSortedSet.copyOf(
targetGraphAndTargets.getTargetGraph().getNodes()));
}
// Sliced with tests: fooTestNode pulls bazLib in as a dep, but bazTestNode
// stays out because bazLib itself is not in the slice's src closure.
@Test
public void testCreateTargetGraphForSliceWithTests() {
TargetGraphAndTargets targetGraphAndTargets = ProjectCommandTests.createTargetGraph(
targetGraph,
ProjectCommand.Ide.XCODE,
ImmutableSet.of(workspaceNode.getBuildTarget()),
/* withTests = */ true,
/* withDependenciesTests */ true);
assertEquals(
ImmutableSortedSet.<TargetNode<?>>of(
workspaceNode,
fooBinNode,
fooBinBinaryNode,
fooLibNode,
fooBinTestNode,
fooTestNode,
barLibNode,
bazLibNode,
workspaceExtraTestNode),
ImmutableSortedSet.copyOf(
targetGraphAndTargets.getTargetGraph().getNodes()));
}
@Test
public void testCreateTargetGraphForSmallSliceWithoutTests() {
TargetGraphAndTargets targetGraphAndTargets = ProjectCommandTests.createTargetGraph(
targetGraph,
ProjectCommand.Ide.XCODE,
ImmutableSet.of(smallWorkspaceNode.getBuildTarget()),
/* withTests = */ false,
/* withDependenciesTests */ false);
assertEquals(
ImmutableSortedSet.of(
smallWorkspaceNode,
bazLibNode),
ImmutableSortedSet.copyOf(
targetGraphAndTargets.getTargetGraph().getNodes()));
}
@Test
public void testCreateTargetGraphForSmallSliceWithTests() {
TargetGraphAndTargets targetGraphAndTargets = ProjectCommandTests.createTargetGraph(
targetGraph,
ProjectCommand.Ide.XCODE,
ImmutableSet.of(smallWorkspaceNode.getBuildTarget()),
/* withTests = */ true,
/* withDependenciesTests */ true);
assertEquals(
ImmutableSortedSet.of(
smallWorkspaceNode,
bazLibNode,
bazTestNode),
ImmutableSortedSet.copyOf(
targetGraphAndTargets.getTargetGraph().getNodes()));
}
@Test
public void testTargetWithTests() throws IOException, InterruptedException {
Map<Path, ProjectGenerator> projectGenerators = generateProjectsForTests(
ImmutableSet.of(fooBinNode.getBuildTarget()),
/* withTests = */ true,
/* withDependenciesTests */ true);
ProjectGeneratorTestUtils.assertTargetExists(
projectGenerators.get(Paths.get("foo")), "bin-xctest");
ProjectGeneratorTestUtils.assertTargetExists(
projectGenerators.get(Paths.get("foo")), "lib-xctest");
}
// Shared helper: runs workspace generation over the fixture graph with the
// given targets/test flags; the remaining generation options are fixed off.
private Map<Path, ProjectGenerator> generateProjectsForTests(
ImmutableSet<BuildTarget> passedInTargetsSet,
boolean isWithTests,
boolean isWithDependenciesTests)
throws IOException, InterruptedException {
return ProjectCommandTests.generateWorkspacesForTargets(
targetGraph,
passedInTargetsSet,
isWithTests,
isWithDependenciesTests,
/* isReadonly = */ false,
/* isBuildWithBuck = */ false,
/* isCombinedProjects = */ false,
/* isCombinesTestBundles = */ false);
}
@Test
public void testTargetWithoutDependenciesTests() throws IOException, InterruptedException {
Map<Path, ProjectGenerator> projectGenerators = generateProjectsForTests(
ImmutableSet.of(fooBinNode.getBuildTarget()),
/* withTests = */ true,
/* withDependenciesTests */ false);
ProjectGeneratorTestUtils.assertTargetExists(
projectGenerators.get(Paths.get("foo")), "bin-xctest");
ProjectGeneratorTestUtils.assertTargetDoesNotExists(
projectGenerators.get(Paths.get("foo")), "lib-xctest");
}
@Test
public void testTargetWithoutTests() throws IOException, InterruptedException {
Map<Path, ProjectGenerator> projectGenerators = generateProjectsForTests(
ImmutableSet.of(fooBinNode.getBuildTarget()),
/* withTests = */ false,
/* withDependenciesTests */ false);
ProjectGeneratorTestUtils.assertTargetDoesNotExists(
projectGenerators.get(Paths.get("foo")), "bin-xctest");
ProjectGeneratorTestUtils.assertTargetDoesNotExists(
projectGenerators.get(Paths.get("foo")), "lib-xctest");
}
@Test
public void testWorkspaceWithoutDependenciesTests() throws IOException, InterruptedException {
Map<Path, ProjectGenerator> projectGenerators = generateProjectsForTests(
ImmutableSet.of(workspaceNode.getBuildTarget()),
/* withTests = */ true,
/* withDependenciesTests */ false);
ProjectGeneratorTestUtils.assertTargetExists(
projectGenerators.get(Paths.get("foo")), "bin-xctest");
ProjectGeneratorTestUtils.assertTargetDoesNotExists(
projectGenerators.get(Paths.get("foo")), "lib-xctest");
ProjectGeneratorTestUtils.assertTargetExists(
projectGenerators.get(Paths.get("foo")), "extra-xctest");
}
@Test
public void testWorkspaceWithoutExtraTestsWithoutDependenciesTests()
throws IOException, InterruptedException {
Map<Path, ProjectGenerator> projectGenerators = generateProjectsForTests(
ImmutableSet.of(smallWorkspaceNode.getBuildTarget()),
/* withTests = */ true,
/* withDependenciesTests */ false);
ProjectGeneratorTestUtils.assertTargetExists(
projectGenerators.get(Paths.get("baz")), "lib");
ProjectGeneratorTestUtils.assertTargetExists(
projectGenerators.get(Paths.get("baz")), "xctest");
}
}
| |
package org.apache.continuum.web.action.admin;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.continuum.configuration.BuildAgentConfiguration;
import org.apache.continuum.configuration.BuildAgentGroupConfiguration;
import org.apache.continuum.web.util.AuditLog;
import org.apache.continuum.web.util.AuditLogConstants;
import org.apache.maven.continuum.ContinuumException;
import org.apache.maven.continuum.configuration.ConfigurationService;
import org.apache.maven.continuum.model.system.Installation;
import org.apache.maven.continuum.model.system.Profile;
import org.apache.maven.continuum.security.ContinuumRoleConstants;
import org.apache.maven.continuum.web.action.ContinuumConfirmAction;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.redback.rbac.Resource;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.redback.integration.interceptor.SecureAction;
import org.codehaus.redback.integration.interceptor.SecureActionBundle;
import org.codehaus.redback.integration.interceptor.SecureActionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Struts action for administering distributed build agents and build agent
 * groups: listing, creating, editing and deleting them through the Continuum
 * configuration service. Requires the manage-distributed-builds authorization.
 *
 * @author Maria Catherine Tan
 */
@Component( role = com.opensymphony.xwork2.Action.class, hint = "buildAgent", instantiationStrategy = "per-lookup" )
public class BuildAgentAction
    extends ContinuumConfirmAction
    implements SecureAction
{
    private static final Logger logger = LoggerFactory.getLogger( BuildAgentAction.class );

    private List<BuildAgentConfiguration> buildAgents;

    private BuildAgentConfiguration buildAgent;

    private BuildAgentGroupConfiguration buildAgentGroup;

    private List<BuildAgentGroupConfiguration> buildAgentGroups;

    private List<BuildAgentConfiguration> selectedbuildAgents;

    private List<String> selectedBuildAgentIds;

    private List<Installation> installations;

    private boolean confirmed;

    private String type;

    private String typeGroup;

    public void prepare()
        throws Exception
    {
        super.prepare();
        this.setBuildAgents( getContinuum().getConfiguration().getBuildAgents() );
    }

    /**
     * Prepares the add/edit form: resolves an existing agent by (XML-escaped)
     * URL for editing, or initializes a fresh enabled agent for creation.
     */
    public String input()
        throws Exception
    {
        if ( buildAgent != null && !StringUtils.isBlank( buildAgent.getUrl() ) )
        {
            // escape xml to prevent xss attacks
            String escapedBuildAgentUrl = StringEscapeUtils.escapeXml( buildAgent.getUrl() );
            buildAgent.setUrl( escapedBuildAgentUrl );
            List<BuildAgentConfiguration> agents = getContinuum().getConfiguration().getBuildAgents();
            for ( BuildAgentConfiguration agent : agents )
            {
                if ( agent.getUrl().equals( escapedBuildAgentUrl ) )
                {
                    buildAgent = agent;
                    type = "edit";
                }
            }
        }
        else
        {
            type = "new";
            buildAgent = new BuildAgentConfiguration();
            buildAgent.setEnabled( true );
        }
        return INPUT;
    }

    /** Populates the agent and agent-group lists for the overview page. */
    public String list()
        throws Exception
    {
        this.buildAgents = getContinuum().getConfiguration().getBuildAgents();
        this.buildAgentGroups = getContinuum().getConfiguration().getBuildAgentGroups();
        return SUCCESS;
    }

    /**
     * Resolves the agent by URL and loads its available installations from the
     * distributed build manager (failures are logged, not fatal).
     */
    public String view()
        throws Exception
    {
        ConfigurationService configuration = getContinuum().getConfiguration();
        if ( buildAgent != null )
        {
            for ( BuildAgentConfiguration agent : configuration.getBuildAgents() )
            {
                if ( agent.getUrl().equals( buildAgent.getUrl() ) )
                {
                    buildAgent = agent;
                    try
                    {
                        installations = getContinuum().getDistributedBuildManager().getAvailableInstallations(
                            buildAgent.getUrl() );
                    }
                    catch ( ContinuumException e )
                    {
                        logger.error( "Unable to retrieve installations of build agent '" + agent.getUrl() + "'", e );
                    }
                    break;
                }
            }
        }
        return SUCCESS;
    }

    /**
     * Saves the agent: updates the matching existing entry (rejecting
     * duplicates for "new"), pushes the change to the distributed build
     * manager, and adds the agent when no existing entry matched.
     */
    public String save()
        throws Exception
    {
        boolean found = false;
        ConfigurationService configuration = getContinuum().getConfiguration();
        // escape xml to prevent xss attacks
        buildAgent.setDescription(
            StringEscapeUtils.escapeXml( StringEscapeUtils.unescapeXml( buildAgent.getDescription() ) ) );
        if ( configuration.getBuildAgents() != null )
        {
            for ( BuildAgentConfiguration agent : configuration.getBuildAgents() )
            {
                if ( agent.getUrl().equals( buildAgent.getUrl() ) )
                {
                    if ( type.equals( "new" ) )
                    {
                        addActionError( getText( "buildAgent.error.duplicate" ) );
                        return INPUT;
                    }
                    else
                    {
                        agent.setDescription( buildAgent.getDescription() );
                        agent.setEnabled( buildAgent.isEnabled() );
                        configuration.updateBuildAgent( agent );
                        configuration.store();
                    }
                    found = true;
                }
            }
        }
        // update first, so that we don't add or change it if it fails
        try
        {
            getContinuum().getDistributedBuildManager().update( buildAgent );
        }
        catch ( ContinuumException e )
        {
            addActionError( e.getMessage() );
            return INPUT;
        }
        AuditLog event = new AuditLog( "Build Agent URL=" + buildAgent.getUrl(), AuditLogConstants.MODIFY_BUILD_AGENT );
        event.setCategory( AuditLogConstants.BUILD_AGENT );
        event.setCurrentUser( getPrincipal() );
        if ( !found )
        {
            configuration.addBuildAgent( buildAgent );
            configuration.store();
            event.setAction( AuditLogConstants.ADD_BUILD_AGENT );
        }
        event.log();
        return SUCCESS;
    }

    /**
     * Deletes the agent after confirmation, refusing when the agent is busy
     * or still referenced by an agent group.
     */
    public String delete()
        throws Exception
    {
        buildAgent.setUrl( StringEscapeUtils.escapeXml( buildAgent.getUrl() ) );
        if ( !confirmed )
        {
            return CONFIRM;
        }
        if ( getContinuum().getDistributedBuildManager().isBuildAgentBusy( buildAgent.getUrl() ) )
        {
            addActionError( getText( "buildAgent.error.delete.busy" ) );
            return ERROR;
        }
        ConfigurationService configuration = getContinuum().getConfiguration();
        if ( configuration.getBuildAgentGroups() != null )
        {
            for ( BuildAgentGroupConfiguration buildAgentGroup : configuration.getBuildAgentGroups() )
            {
                if ( configuration.containsBuildAgentUrl( buildAgent.getUrl(), buildAgentGroup ) )
                {
                    addActionError( getText( "buildAgent.error.remove.in.use" ) );
                    return ERROR;
                }
            }
        }
        if ( configuration.getBuildAgents() != null )
        {
            for ( BuildAgentConfiguration agent : configuration.getBuildAgents() )
            {
                if ( buildAgent.getUrl().equals( agent.getUrl() ) )
                {
                    getContinuum().getDistributedBuildManager().removeDistributedBuildQueueOfAgent(
                        buildAgent.getUrl() );
                    configuration.removeBuildAgent( agent );
                    configuration.store();
                    AuditLog event =
                        new AuditLog( "Build Agent URL=" + agent.getUrl(), AuditLogConstants.REMOVE_BUILD_AGENT );
                    event.setCategory( AuditLogConstants.BUILD_AGENT );
                    event.setCurrentUser( getPrincipal() );
                    event.log();
                    getContinuum().getDistributedBuildManager().reload();
                    return SUCCESS;
                }
            }
        }
        addActionError( getText( "buildAgent.error.notfound" ) );
        return ERROR;
    }

    /**
     * Deletes the agent group after confirmation, refusing when any profile
     * still references it.
     */
    public String deleteGroup()
        throws Exception
    {
        buildAgentGroup.setName( StringEscapeUtils.escapeXml( buildAgentGroup.getName() ) );
        if ( !confirmed )
        {
            return CONFIRM;
        }
        List<Profile> profiles = getContinuum().getProfileService().getAllProfiles();
        for ( Profile profile : profiles )
        {
            if ( buildAgentGroup.getName().equals( profile.getBuildAgentGroup() ) )
            {
                addActionError( getText( "buildAgentGroup.error.remove.in.use", new String[] { profile.getName() } ) );
                return ERROR;
            }
        }
        ConfigurationService configuration = getContinuum().getConfiguration();
        for ( BuildAgentGroupConfiguration group : configuration.getBuildAgentGroups() )
        {
            if ( buildAgentGroup.getName().equals( group.getName() ) )
            {
                configuration.removeBuildAgentGroup( group );
                AuditLog event =
                    new AuditLog( "Build Agent Group=" + group.getName(), AuditLogConstants.REMOVE_BUILD_AGENT_GROUP );
                event.setCategory( AuditLogConstants.BUILD_AGENT );
                event.setCurrentUser( getPrincipal() );
                event.log();
                return SUCCESS;
            }
        }
        addActionError( getText( "buildAgentGroup.error.doesnotexist" ) );
        return ERROR;
    }

    /**
     * Saves the agent group: validates the name, rejects duplicates for "new",
     * updates membership for "edit", and adds the group when it did not exist.
     */
    public String saveGroup()
        throws Exception
    {
        boolean found = false;
        ConfigurationService configuration = getContinuum().getConfiguration();
        selectedbuildAgents = getBuildAgentsFromSelectedBuildAgents();
        if ( buildAgentGroup.getName() != null )
        {
            if ( buildAgentGroup.getName().equals( "" ) )
            {
                addActionError( getText( "buildAgentGroup.error.name.required" ) );
                return INPUT;
            }
            else if ( buildAgentGroup.getName().trim().equals( "" ) )
            {
                addActionError( getText( "buildAgentGroup.error.name.cannot.be.spaces" ) );
                return INPUT;
            }
        }
        if ( configuration.getBuildAgentGroups() != null )
        {
            for ( BuildAgentGroupConfiguration group : configuration.getBuildAgentGroups() )
            {
                if ( buildAgentGroup.getName().equals( group.getName() ) )
                {
                    group.setName( buildAgentGroup.getName() );
                    configuration.updateBuildAgentGroup( group );
                    found = true;
                    break;
                }
            }
        }
        AuditLog event = new AuditLog( "Build Agent Group=" + buildAgentGroup.getName(),
                                       AuditLogConstants.MODIFY_BUILD_AGENT_GROUP );
        event.setCategory( AuditLogConstants.BUILD_AGENT );
        event.setCurrentUser( getPrincipal() );
        if ( !found )
        {
            buildAgentGroup.setBuildAgents( selectedbuildAgents );
            configuration.addBuildAgentGroup( buildAgentGroup );
            event.setAction( AuditLogConstants.ADD_BUILD_AGENT_GROUP );
        }
        else
        // found
        {
            if ( typeGroup.equals( "new" ) )
            {
                addActionError( getText( "buildAgentGroup.error.duplicate" ) );
                return INPUT;
            }
            else if ( typeGroup.equals( "edit" ) )
            {
                buildAgentGroup.setBuildAgents( selectedbuildAgents );
                configuration.updateBuildAgentGroup( buildAgentGroup );
            }
        }
        getContinuum().getDistributedBuildManager().reload();
        event.log();
        return SUCCESS;
    }

    /**
     * Prepares the group add/edit form: for an existing group, loads its
     * members into selectedBuildAgentIds and shows only unused agents as
     * candidates; otherwise initializes a new empty group.
     */
    public String inputGroup()
        throws Exception
    {
        ConfigurationService configuration = getContinuum().getConfiguration();
        if ( buildAgentGroup != null && !StringUtils.isBlank( buildAgentGroup.getName() ) )
        {
            // escape xml to prevent xss attacks
            String escapedBuildAgentGroupName = StringEscapeUtils.escapeXml( buildAgentGroup.getName() );
            buildAgentGroup.setName( escapedBuildAgentGroupName );
            List<BuildAgentGroupConfiguration> agentGroups = configuration.getBuildAgentGroups();
            for ( BuildAgentGroupConfiguration group : agentGroups )
            {
                if ( group.getName().equals( escapedBuildAgentGroupName ) )
                {
                    buildAgentGroup = group;
                    typeGroup = "edit";
                    this.buildAgentGroup = configuration.getBuildAgentGroup( escapedBuildAgentGroupName );
                    this.buildAgents = configuration.getBuildAgents();
                    this.selectedBuildAgentIds = new ArrayList<String>();
                    if ( this.buildAgentGroup.getBuildAgents() != null )
                    {
                        for ( BuildAgentConfiguration buildAgentConfiguration : buildAgentGroup.getBuildAgents() )
                        {
                            this.selectedBuildAgentIds.add( buildAgentConfiguration.getUrl() );
                        }
                    }
                    List<BuildAgentConfiguration> unusedBuildAgents = new ArrayList<BuildAgentConfiguration>();
                    for ( BuildAgentConfiguration agent : getBuildAgents() )
                    {
                        if ( !this.selectedBuildAgentIds.contains( agent.getUrl() ) )
                        {
                            unusedBuildAgents.add( agent );
                        }
                    }
                    this.setBuildAgents( unusedBuildAgents );
                    break;
                }
            }
        }
        else
        {
            buildAgentGroup = new BuildAgentGroupConfiguration();
            typeGroup = "new";
        }
        return INPUT;
    }

    public SecureActionBundle getSecureActionBundle()
        throws SecureActionException
    {
        SecureActionBundle bundle = new SecureActionBundle();
        bundle.setRequiresAuthentication( true );
        bundle.addRequiredAuthorization( ContinuumRoleConstants.CONTINUUM_MANAGE_DISTRIBUTED_BUILDS, Resource.GLOBAL );
        return bundle;
    }

    /**
     * Resolves the agent configurations for the currently selected agent ids,
     * silently skipping ids that no longer resolve to an agent.
     */
    private List<BuildAgentConfiguration> getBuildAgentsFromSelectedBuildAgents()
    {
        if ( this.selectedBuildAgentIds == null )
        {
            // Typed empty list instead of the raw Collections.EMPTY_LIST.
            return Collections.emptyList();
        }
        // Renamed from "selectedbuildAgents" to avoid shadowing the field.
        List<BuildAgentConfiguration> agents = new ArrayList<BuildAgentConfiguration>();
        for ( String id : selectedBuildAgentIds )
        {
            BuildAgentConfiguration agent = getContinuum().getConfiguration().getBuildAgent( id );
            if ( agent != null )
            {
                agents.add( agent );
            }
        }
        return agents;
    }

    public List<BuildAgentConfiguration> getBuildAgents()
    {
        return buildAgents;
    }

    public void setBuildAgents( List<BuildAgentConfiguration> buildAgents )
    {
        this.buildAgents = buildAgents;
    }

    public BuildAgentConfiguration getBuildAgent()
    {
        return buildAgent;
    }

    public void setBuildAgent( BuildAgentConfiguration buildAgent )
    {
        this.buildAgent = buildAgent;
    }

    public List<Installation> getInstallations()
    {
        return installations;
    }

    public void setInstallations( List<Installation> installations )
    {
        this.installations = installations;
    }

    public boolean isConfirmed()
    {
        return confirmed;
    }

    public void setConfirmed( boolean confirmed )
    {
        this.confirmed = confirmed;
    }

    public String getType()
    {
        return type;
    }

    public void setType( String type )
    {
        this.type = type;
    }

    public List<BuildAgentGroupConfiguration> getBuildAgentGroups()
    {
        return buildAgentGroups;
    }

    public void setBuildAgentGroups( List<BuildAgentGroupConfiguration> buildAgentGroups )
    {
        this.buildAgentGroups = buildAgentGroups;
    }

    public BuildAgentGroupConfiguration getBuildAgentGroup()
    {
        return buildAgentGroup;
    }

    public void setBuildAgentGroup( BuildAgentGroupConfiguration buildAgentGroup )
    {
        this.buildAgentGroup = buildAgentGroup;
    }

    public String getTypeGroup()
    {
        return typeGroup;
    }

    public void setTypeGroup( String typeGroup )
    {
        this.typeGroup = typeGroup;
    }

    public List<BuildAgentConfiguration> getSelectedbuildAgents()
    {
        return selectedbuildAgents;
    }

    public void setSelectedbuildAgents( List<BuildAgentConfiguration> selectedbuildAgents )
    {
        this.selectedbuildAgents = selectedbuildAgents;
    }

    public List<String> getSelectedBuildAgentIds()
    {
        // Typed empty list instead of the raw Collections.EMPTY_LIST.
        return selectedBuildAgentIds == null ? Collections.<String>emptyList() : selectedBuildAgentIds;
    }

    public void setSelectedBuildAgentIds( List<String> selectedBuildAgentIds )
    {
        this.selectedBuildAgentIds = selectedBuildAgentIds;
    }
}
| |
/**
* Copyright (C) 2011-2020 Red Hat, Inc. (https://github.com/Commonjava/indy)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.commonjava.indy.cassandra.data;
import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.datastax.driver.mapping.Mapper;
import com.datastax.driver.mapping.MappingManager;
import org.commonjava.indy.conf.IndyConfiguration;
import org.commonjava.indy.core.conf.IndyStoreManagerConfig;
import org.commonjava.indy.model.core.StoreKey;
import org.commonjava.indy.model.core.StoreType;
import org.commonjava.indy.subsys.cassandra.CassandraClient;
import org.commonjava.indy.subsys.cassandra.util.SchemaUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.PostConstruct;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;
import java.util.HashSet;
import java.util.Set;
import static org.commonjava.indy.cassandra.data.CassandraStoreUtil.TABLE_AFFECTED_STORE;
import static org.commonjava.indy.cassandra.data.CassandraStoreUtil.TABLE_STORE;
@ApplicationScoped
public class CassandraStoreQuery
{
private final Logger logger = LoggerFactory.getLogger( getClass() );
@Inject
CassandraClient client;
@Inject
IndyStoreManagerConfig config;
@Inject
IndyConfiguration indyConfig;
private Mapper<DtxArtifactStore> storeMapper;
private Session session;
private PreparedStatement preparedSingleArtifactStoreQuery;
private PreparedStatement preparedArtifactStoresQuery;
private PreparedStatement preparedArtifactStoreDel;
private PreparedStatement preparedArtifactStoreExistedQuery;
private PreparedStatement preparedArtifactStoresQueryByKeys;
private PreparedStatement preparedAffectedStoresQuery;
private PreparedStatement preparedAffectedStoresIncrement;
private PreparedStatement preparedAffectedStoresReduction;
private PreparedStatement preparedAffectedStoreExistedQuery;
private PreparedStatement preparedAffectedStoreDel;
// No-arg constructor for the container (@ApplicationScoped); fields are
// injected and init() runs via @PostConstruct.
public CassandraStoreQuery() {}
/**
 * Programmatic constructor for non-CDI use (e.g. tests): wires dependencies
 * directly and initializes the schema and prepared statements immediately.
 */
public CassandraStoreQuery( CassandraClient client, IndyStoreManagerConfig config, IndyConfiguration indyConfig )
{
this.client = client;
this.config = config;
this.indyConfig = indyConfig;
init();
}
/**
 * Opens a session on the configured keyspace, creates the keyspace/tables
 * and index if missing, and prepares all CQL statements used by this class.
 * Statement text must stay in sync with the column layout read in
 * toDtxArtifactStore().
 */
@PostConstruct
public void init()
{
String keySpace = config.getKeyspace();
session = client.getSession( keySpace );
// Idempotent schema setup: keyspace, store table, index, affected-store table.
session.execute( SchemaUtils.getSchemaCreateKeyspace( keySpace, indyConfig.getKeyspaceReplicas() ));
session.execute( CassandraStoreUtil.getSchemaCreateTableStore( keySpace ) );
session.execute( CassandraStoreUtil.getSchemaCreateIndex4Store( keySpace ) );
session.execute( CassandraStoreUtil.getSchemaCreateTableAffectedStore( keySpace ) );
MappingManager manager = new MappingManager( session );
storeMapper = manager.mapper( DtxArtifactStore.class, keySpace );
preparedSingleArtifactStoreQuery = session.prepare(
"SELECT packagetype, storeType, namehashprefix, name, description, transientMetadata, metadata, disabled, disableTimeout, pathStyle, pathMaskPatterns, authoritativeIndex, createTime, rescanInProgress, extras FROM "
+ keySpace + "." + TABLE_STORE + " WHERE typekey=? AND namehashprefix=? AND name=?" );
preparedArtifactStoresQuery = session.prepare(
"SELECT packagetype, storeType, namehashprefix, name, description, transientMetadata, metadata, disabled, disableTimeout, pathStyle, pathMaskPatterns, authoritativeIndex, createTime, rescanInProgress, extras FROM "
+ keySpace + "." + TABLE_STORE );
preparedArtifactStoresQueryByKeys = session.prepare(
"SELECT packagetype, storeType, namehashprefix, name, description, transientMetadata, metadata, disabled, disableTimeout, pathStyle, pathMaskPatterns, authoritativeIndex, createTime, rescanInProgress, extras FROM "
+ keySpace + "." + TABLE_STORE + " WHERE typekey=?" );
// LIMIT 1 existence probe used by isEmpty().
preparedArtifactStoreExistedQuery = session.prepare( "SELECT name FROM " + keySpace + "." + TABLE_STORE + " LIMIT 1");
preparedArtifactStoreDel = session.prepare( "DELETE FROM " + keySpace + "." + TABLE_STORE + " WHERE typekey=? AND namehashprefix=? AND name=? IF EXISTS" );
preparedAffectedStoresQuery = session.prepare( "SELECT key, affectedStores FROM " + keySpace + "." + TABLE_AFFECTED_STORE + " WHERE key=? ");
preparedAffectedStoresIncrement =
session.prepare( "UPDATE " + keySpace + "." + TABLE_AFFECTED_STORE + " SET affectedStores = affectedStores + ? WHERE key=?" );
preparedAffectedStoresReduction =
session.prepare( "UPDATE " + keySpace + "." + TABLE_AFFECTED_STORE + " SET affectedStores = affectedStores - ? WHERE key=?" );
preparedAffectedStoreExistedQuery = session.prepare( "SELECT key FROM " + keySpace + "." + TABLE_AFFECTED_STORE + " LIMIT 1");
preparedAffectedStoreDel = session.prepare( "DELETE FROM " + keySpace + "." + TABLE_AFFECTED_STORE + " WHERE key=? " );
}
/**
 * Loads a single artifact store by its composite key (type key, name hash prefix, name).
 *
 * @param packageType the package type part of the store key
 * @param type the store type part of the store key
 * @param name the store name
 * @return the matching store, or {@code null} when no row exists
 */
public DtxArtifactStore getArtifactStore( String packageType, StoreType type, String name )
{
    String typeKey = CassandraStoreUtil.getTypeKey( packageType, type.name() );
    BoundStatement query =
            preparedSingleArtifactStoreQuery.bind( typeKey, CassandraStoreUtil.getHashPrefix( name ), name );
    // At most one row can match the full primary key; map it (or null) to the bean.
    Row row = session.execute( query ).one();
    return toDtxArtifactStore( row );
}
/**
 * Loads all artifact stores that share the given package type and store type.
 *
 * @param packageType the package type part of the store key
 * @param type the store type part of the store key
 * @return the matching stores; empty when none exist
 */
public Set<DtxArtifactStore> getArtifactStoresByPkgAndType( String packageType, StoreType type )
{
    String typeKey = CassandraStoreUtil.getTypeKey( packageType, type.name() );
    ResultSet rows = session.execute( preparedArtifactStoresQueryByKeys.bind( typeKey ) );
    Set<DtxArtifactStore> stores = new HashSet<>();
    for ( Row row : rows )
    {
        stores.add( toDtxArtifactStore( row ) );
    }
    return stores;
}
/**
 * Loads every artifact store row from the store table.
 *
 * @return all stores; empty when the table has no rows
 */
public Set<DtxArtifactStore> getAllArtifactStores()
{
    ResultSet rows = session.execute( preparedArtifactStoresQuery.bind() );
    Set<DtxArtifactStore> stores = new HashSet<>();
    for ( Row row : rows )
    {
        stores.add( toDtxArtifactStore( row ) );
    }
    return stores;
}
/**
 * Checks whether the store table contains any rows at all.
 *
 * @return {@code true} when no artifact store exists
 */
public Boolean isEmpty()
{
    // Use the dedicated existence query (SELECT name ... LIMIT 1) that was
    // prepared for this purpose, instead of the unbounded full-table SELECT:
    // we only need to know whether at least one row exists.
    BoundStatement bound = preparedArtifactStoreExistedQuery.bind();
    ResultSet result = session.execute( bound );
    return result.one() == null;
}
/**
 * Deletes the artifact store identified by the given key parts, if it exists.
 *
 * @param packageType the package type part of the store key
 * @param type the store type part of the store key
 * @param name the store name
 * @return the store as it was before deletion, or {@code null} when it did not exist
 */
public DtxArtifactStore removeArtifactStore( String packageType, StoreType type, String name )
{
    DtxArtifactStore existing = getArtifactStore( packageType, type, name );
    if ( existing == null )
    {
        return null;
    }
    // DELETE ... IF EXISTS on the full primary key.
    session.execute( preparedArtifactStoreDel.bind(
            CassandraStoreUtil.getTypeKey( packageType, type.name() ),
            CassandraStoreUtil.getHashPrefix( name ), name ) );
    return existing;
}
/**
 * Maps a Cassandra result row onto a {@link DtxArtifactStore} bean, column by column.
 *
 * @param row a row from the store table; may be null (e.g. from ResultSet.one() on an empty result)
 * @return the populated bean, or {@code null} when the row is null
 */
private DtxArtifactStore toDtxArtifactStore( Row row )
{
    if ( row == null )
    {
        return null;
    }
    DtxArtifactStore store = new DtxArtifactStore();
    store.setPackageType( row.getString( CassandraStoreUtil.PACKAGE_TYPE ) );
    store.setStoreType( row.getString( CassandraStoreUtil.STORE_TYPE ) );
    store.setName( row.getString( CassandraStoreUtil.NAME ) );
    store.setNameHashPrefix( row.getInt( CassandraStoreUtil.NAME_HASH_PREFIX ) );
    store.setPathMaskPatterns( row.getSet( CassandraStoreUtil.PATH_MASK_PATTERNS, String.class ) );
    store.setPathStyle( row.getString( CassandraStoreUtil.PATH_STYLE ) );
    store.setDisabled( row.getBool( CassandraStoreUtil.DISABLED ) );
    store.setDescription( row.getString( CassandraStoreUtil.DESCRIPTION ) );
    store.setAuthoritativeIndex( row.getBool( CassandraStoreUtil.AUTHORITATIVE_INDEX ) );
    store.setCreateTime( row.getString( CassandraStoreUtil.CREATE_TIME ) );
    store.setDisableTimeout( row.getInt( CassandraStoreUtil.DISABLE_TIMEOUT ) );
    // Metadata-style columns are stored as CQL map<text, text> / collections.
    store.setMetadata( row.getMap( CassandraStoreUtil.METADATA, String.class, String.class ) );
    store.setRescanInProgress( row.getBool( CassandraStoreUtil.RESCAN_IN_PROGRESS ) );
    store.setTransientMetadata( row.getMap( CassandraStoreUtil.TRANSIENT_METADATA, String.class, String.class ) );
    store.setExtras( row.getMap( CassandraStoreUtil.EXTRAS, String.class, String.class ) );
    return store;
}
/**
 * Persists (inserts or overwrites) the given artifact store via the object mapper.
 *
 * @param dtxArtifactStore the store to save
 */
public void createDtxArtifactStore( DtxArtifactStore dtxArtifactStore )
{
    storeMapper.save( dtxArtifactStore );
}
/**
 * Loads the affected-store row for the given store key.
 *
 * @param key the store key (its string form is the table key)
 * @return the affected-store entry, or {@code null} when none exists
 */
public DtxAffectedStore getAffectedStore( StoreKey key )
{
    ResultSet rows = session.execute( preparedAffectedStoresQuery.bind( key.toString() ) );
    return toDtxAffectedStore( rows.one() );
}
/**
 * Maps a Cassandra result row onto a {@link DtxAffectedStore} bean.
 *
 * @param row a row from the affected-store table; may be null
 * @return the populated bean, or {@code null} when the row is null
 */
private DtxAffectedStore toDtxAffectedStore( Row row )
{
    if ( row == null )
    {
        return null;
    }
    DtxAffectedStore affected = new DtxAffectedStore();
    affected.setKey( row.getString( CassandraStoreUtil.KEY ) );
    // affectedStores is stored as a CQL set<text>.
    affected.setAffectedStores( row.getSet( CassandraStoreUtil.AFFECTED_STORES, String.class ) );
    return affected;
}
/**
 * Adds {@code affected} to the affected-stores set of {@code storeKey}
 * (CQL set-append: {@code affectedStores = affectedStores + ?}).
 *
 * @param storeKey the store whose affected set is updated
 * @param affected the store key to add to the set
 */
public void addAffectedBy( StoreKey storeKey, StoreKey affected )
{
    BoundStatement bound = preparedAffectedStoresIncrement.bind();
    // Typed set (was a raw HashSet); the driver binds it as a set<text> delta.
    Set<String> increment = new HashSet<>();
    increment.add( affected.toString() );
    bound.setSet( 0, increment );
    bound.setString( 1, storeKey.toString() );
    session.execute( bound );
}
/**
 * Removes {@code affected} from the affected-stores set of {@code storeKey}
 * (CQL set-subtract: {@code affectedStores = affectedStores - ?}).
 *
 * @param storeKey the store whose affected set is updated
 * @param affected the store key to remove from the set
 */
public void removeAffectedBy( StoreKey storeKey, StoreKey affected )
{
    BoundStatement bound = preparedAffectedStoresReduction.bind();
    // Typed set (was a raw HashSet); the driver binds it as a set<text> delta.
    Set<String> reduction = new HashSet<>();
    reduction.add( affected.toString() );
    bound.setSet( 0, reduction );
    bound.setString( 1, storeKey.toString() );
    session.execute( bound );
}
/**
 * Checks whether the affected-store table contains any rows at all.
 *
 * @return {@code true} when no affected-store entry exists
 */
public Boolean isAffectedEmpty()
{
    // The existence query selects a single key with LIMIT 1.
    Row first = session.execute( preparedAffectedStoreExistedQuery.bind() ).one();
    return first == null;
}
/**
 * Deletes the affected-store row for the given key, if one exists.
 *
 * @param key the store key whose affected-store entry is removed
 */
public void removeAffectedStore( StoreKey key )
{
    // Read first so the delete is only issued for rows that actually exist.
    if ( getAffectedStore( key ) == null )
    {
        return;
    }
    session.execute( preparedAffectedStoreDel.bind( key.toString() ) );
}
}
| |
package io.github.yangxlei.bjnetwork;
/*
* Copyright (C) 2015 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.EOFException;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.UnsupportedCharsetException;
import java.util.concurrent.TimeUnit;
import okhttp3.Connection;
import okhttp3.Headers;
import okhttp3.Interceptor;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Protocol;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import okhttp3.ResponseBody;
import okhttp3.internal.Platform;
import okhttp3.internal.http.HttpEngine;
import okio.Buffer;
import okio.BufferedSource;
import static okhttp3.internal.Platform.INFO;
/**
* An OkHttp interceptor which logs request and response information. Can be applied as an
* {@linkplain OkHttpClient#interceptors() application interceptor} or as a {@linkplain
* OkHttpClient#networkInterceptors() network interceptor}. <p> The format of the logs created by
* this class should not be considered stable and may change slightly between releases. If you need
* a stable logging format, use your own interceptor.
*/
public final class HttpLoggingInterceptor implements Interceptor {
  // Fallback charset for decoding bodies whose Content-Type declares none.
  private static final Charset UTF8 = Charset.forName("UTF-8");
  public enum Level {
    /** No logs. */
    NONE,
    /**
     * Logs request and response lines.
     *
     * <p>Example:
     * <pre>{@code
     * --> POST /greeting http/1.1 (3-byte body)
     *
     * <-- 200 OK (22ms, 6-byte body)
     * }</pre>
     */
    BASIC,
    /**
     * Logs request and response lines and their respective headers.
     *
     * <p>Example:
     * <pre>{@code
     * --> POST /greeting http/1.1
     * Host: example.com
     * Content-Type: plain/text
     * Content-Length: 3
     * --> END POST
     *
     * <-- 200 OK (22ms)
     * Content-Type: plain/text
     * Content-Length: 6
     * <-- END HTTP
     * }</pre>
     */
    HEADERS,
    /**
     * Logs request and response lines and their respective headers and bodies (if present).
     *
     * <p>Example:
     * <pre>{@code
     * --> POST /greeting http/1.1
     * Host: example.com
     * Content-Type: plain/text
     * Content-Length: 3
     *
     * Hi?
     * --> END GET
     *
     * <-- 200 OK (22ms)
     * Content-Type: plain/text
     * Content-Length: 6
     *
     * Hello!
     * <-- END HTTP
     * }</pre>
     */
    BODY
  }
  public interface Logger {
    void log(String message);
    /** A {@link Logger} defaults output appropriate for the current platform. */
    Logger DEFAULT = new Logger() {
      @Override public void log(String message) {
        Platform.get().log(INFO, message, null);
      }
    };
  }
  /** Creates an interceptor that logs via the platform default logger. */
  public HttpLoggingInterceptor() {
    this(Logger.DEFAULT);
  }
  /** Creates an interceptor that logs via the given {@code logger}. */
  public HttpLoggingInterceptor(Logger logger) {
    this.logger = logger;
  }
  private final Logger logger;
  // volatile: the level may be changed from a different thread than the one
  // executing intercept().
  private volatile Level level = Level.NONE;
  /** Change the level at which this interceptor logs. */
  public HttpLoggingInterceptor setLevel(Level level) {
    if (level == null) throw new NullPointerException("level == null. Use Level.NONE instead.");
    this.level = level;
    return this;
  }
  /** Returns the level at which this interceptor currently logs. */
  public Level getLevel() {
    return level;
  }
  /**
   * Logs the request (line/headers/body per {@link #getLevel()}), proceeds with the
   * chain, then logs the response the same way. Exceptions from the chain are
   * logged and rethrown unchanged.
   */
  @Override public Response intercept(Chain chain) throws IOException {
    // Snapshot the volatile level once so the whole call uses one consistent value.
    Level level = this.level;
    Request request = chain.request();
    if (level == Level.NONE) {
      return chain.proceed(request);
    }
    boolean logBody = level == Level.BODY;
    boolean logHeaders = logBody || level == Level.HEADERS;
    RequestBody requestBody = request.body();
    boolean hasRequestBody = requestBody != null;
    // The connection is only available when installed as a network interceptor;
    // fall back to HTTP/1.1 for application interceptors.
    Connection connection = chain.connection();
    Protocol protocol = connection != null ? connection.protocol() : Protocol.HTTP_1_1;
    String requestStartMessage = "--> " + request.method() + ' ' + request.url() + ' ' + protocol;
    if (!logHeaders && hasRequestBody) {
      requestStartMessage += " (" + requestBody.contentLength() + "-byte body)";
    }
    logger.log(requestStartMessage);
    if (logHeaders) {
      if (hasRequestBody) {
        // Request body headers are only present when installed as a network interceptor. Force
        // them to be included (when available) so their values are known.
        if (requestBody.contentType() != null) {
          logger.log("Content-Type: " + requestBody.contentType());
        }
        if (requestBody.contentLength() != -1) {
          logger.log("Content-Length: " + requestBody.contentLength());
        }
      }
      Headers headers = request.headers();
      for (int i = 0, count = headers.size(); i < count; i++) {
        String name = headers.name(i);
        // Skip headers from the request body as they are explicitly logged above.
        if (!"Content-Type".equalsIgnoreCase(name) && !"Content-Length".equalsIgnoreCase(name)) {
          logger.log(name + ": " + headers.value(i));
        }
      }
      if (!logBody || !hasRequestBody) {
        logger.log("--> END " + request.method());
      } else if (bodyEncoded(request.headers())) {
        logger.log("--> END " + request.method() + " (encoded body omitted)");
      } else {
        // Write the body into a throwaway buffer so logging never consumes
        // the real request stream.
        Buffer buffer = new Buffer();
        requestBody.writeTo(buffer);
        Charset charset = UTF8;
        MediaType contentType = requestBody.contentType();
        if (contentType != null) {
          charset = contentType.charset(UTF8);
        }
        logger.log("");
        if (isPlaintext(buffer)) {
          logger.log(buffer.readString(charset));
          logger.log("--> END " + request.method()
              + " (" + requestBody.contentLength() + "-byte body)");
        } else {
          logger.log("--> END " + request.method() + " (binary "
              + requestBody.contentLength() + "-byte body omitted)");
        }
      }
    }
    long startNs = System.nanoTime();
    Response response;
    try {
      response = chain.proceed(request);
    } catch (Exception e) {
      logger.log("<-- HTTP FAILED: " + e);
      throw e;
    }
    long tookMs = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNs);
    ResponseBody responseBody = response.body();
    long contentLength = responseBody.contentLength();
    String bodySize = contentLength != -1 ? contentLength + "-byte" : "unknown-length";
    logger.log("<-- " + response.code() + ' ' + response.message() + ' '
        + response.request().url() + " (" + tookMs + "ms" + (!logHeaders ? ", "
        + bodySize + " body" : "") + ')');
    if (logHeaders) {
      Headers headers = response.headers();
      for (int i = 0, count = headers.size(); i < count; i++) {
        logger.log(headers.name(i) + ": " + headers.value(i));
      }
      if (!logBody || !HttpEngine.hasBody(response)) {
        logger.log("<-- END HTTP");
      } else if (bodyEncoded(response.headers())) {
        logger.log("<-- END HTTP (encoded body omitted)");
      } else {
        BufferedSource source = responseBody.source();
        source.request(Long.MAX_VALUE); // Buffer the entire body.
        Buffer buffer = source.buffer();
        Charset charset = UTF8;
        MediaType contentType = responseBody.contentType();
        if (contentType != null) {
          try {
            charset = contentType.charset(UTF8);
          } catch (UnsupportedCharsetException e) {
            logger.log("");
            logger.log("Couldn't decode the response body; charset is likely malformed.");
            logger.log("<-- END HTTP");
            return response;
          }
        }
        if (!isPlaintext(buffer)) {
          logger.log("");
          logger.log("<-- END HTTP (binary " + buffer.size() + "-byte body omitted)");
          return response;
        }
        if (contentLength != 0) {
          logger.log("");
          // Clone so the downstream consumer can still read the buffered body.
          logger.log(buffer.clone().readString(charset));
        }
        logger.log("<-- END HTTP (" + buffer.size() + "-byte body)");
      }
    }
    return response;
  }
  /**
   * Returns true if the body in question probably contains human readable text. Uses a small sample
   * of code points to detect unicode control characters commonly used in binary file signatures.
   */
  static boolean isPlaintext(Buffer buffer) throws EOFException {
    try {
      // Inspect only a copy of the first 64 bytes; the original buffer is untouched.
      Buffer prefix = new Buffer();
      long byteCount = buffer.size() < 64 ? buffer.size() : 64;
      buffer.copyTo(prefix, 0, byteCount);
      for (int i = 0; i < 16; i++) {
        if (prefix.exhausted()) {
          break;
        }
        int codePoint = prefix.readUtf8CodePoint();
        if (Character.isISOControl(codePoint) && !Character.isWhitespace(codePoint)) {
          return false;
        }
      }
      return true;
    } catch (EOFException e) {
      return false; // Truncated UTF-8 sequence.
    }
  }
  /**
   * Returns true when the headers declare a Content-Encoding other than "identity",
   * i.e. the body bytes are compressed/transformed and not worth printing verbatim.
   */
  private boolean bodyEncoded(Headers headers) {
    String contentEncoding = headers.get("Content-Encoding");
    return contentEncoding != null && !contentEncoding.equalsIgnoreCase("identity");
  }
}
| |
package net.incivility.fantastchat.adapter;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import android.util.LruCache;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.ArrayAdapter;
import android.widget.GridView;
import android.widget.ImageView;
import net.incivility.fantastchat.utils.ImageUtil;
import net.incivility.fantastchat.R;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* Created by Administrator on 2016/4/19.
*/
public class ChoosePictureAdapter extends ArrayAdapter<String> implements AbsListView.OnScrollListener {
private GridView mGridView;
private LruCache<String,Bitmap> mMemoryCache;
private int mFirstVisibeItem;
private int mVisibleItemCount;
private boolean isFirstEnter=true;
private Set<BitmapWorkTask> tasks;
private List<String> list;
private Executor exec;
public ChoosePictureAdapter(Context context, int resourceId, List<String> objects , GridView gridView) {
super(context, resourceId,objects);
mGridView=gridView;
int maxMemory=(int)Runtime.getRuntime().maxMemory();
int cacheSize=maxMemory/8;
list=objects;
mMemoryCache=new LruCache<String, Bitmap>(cacheSize){
@Override
protected int sizeOf(String key, Bitmap value) {
return value.getByteCount();
}
};
mGridView.setOnScrollListener(this);
tasks=new HashSet<>();
exec=new ThreadPoolExecutor(8,128,10, TimeUnit.SECONDS,new LinkedBlockingDeque<Runnable>());
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
final String path=getItem(position);
View view;
ViewHold viewHold;
if (convertView==null)
{
view= LayoutInflater.from(getContext()).inflate(R.layout.photo_image_view,null);
viewHold=new ViewHold();
viewHold.photo=(ImageView)view.findViewById(R.id.choose_pic_photo);
view.setTag(viewHold);
}
else {
view = convertView;
viewHold=(ViewHold)view.getTag();
}
viewHold.photo.setTag(path);
setImageView(path,viewHold.photo);
return view;
}
private static class ViewHold
{
ImageView photo;
}
private void setImageView(String path,ImageView imageView)
{
Bitmap bitmap=getBitmapFromMemoryCache(path);
if (bitmap!=null)
{
imageView.setImageBitmap(bitmap);
}
else
{
imageView.setImageResource(R.drawable.choose_pic_empty);
}
}
public void addBitmapToMemoryCache(String key,Bitmap bitmap)
{
if (getBitmapFromMemoryCache(key)==null)
mMemoryCache.put(key,bitmap);
}
public Bitmap getBitmapFromMemoryCache(String key)
{
return mMemoryCache.get(key);
}
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
if (scrollState==SCROLL_STATE_IDLE)
loadBitMap(mFirstVisibeItem,mVisibleItemCount);
else
{
cancellAllTask();
}
}
@Override
public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
mFirstVisibeItem=firstVisibleItem;
mVisibleItemCount=visibleItemCount;
if (isFirstEnter&&visibleItemCount>0)
{
loadBitMap(firstVisibleItem,visibleItemCount);
isFirstEnter=false;
}
}
private void loadBitMap(int firstVisibeItem,int visibleItemCount)
{
for (int i=firstVisibeItem;i<firstVisibeItem+visibleItemCount;i++)
{
String path=list.get(i);
Bitmap bitmap=getBitmapFromMemoryCache(path);
if (bitmap==null)
{
BitmapWorkTask task=new BitmapWorkTask();
tasks.add(task);
task.executeOnExecutor(exec,path);
}
else {
ImageView imageView = (ImageView) mGridView.findViewWithTag(path);
if (imageView != null && bitmap != null) {
imageView.setImageBitmap(bitmap);
}
}
}
}
public void cancellAllTask()
{
if (tasks!=null&&!tasks.isEmpty())
{
for (BitmapWorkTask task:tasks)
task.cancel(false);
}
}
class BitmapWorkTask extends AsyncTask<String ,Void,Bitmap>
{
private String path;
@Override
protected Bitmap doInBackground(String... params) {
path=params[0];
Bitmap bitmap;
if (path.equals("choose_pic_take_photo"))
{
bitmap= BitmapFactory.decodeResource(getContext().getResources(),R.drawable.choose_pic_take_photo);
}
else
{
bitmap= ImageUtil.decodeBitmap(params[0]);
}
if (bitmap!=null) {
addBitmapToMemoryCache(params[0], bitmap);
}
return bitmap;
}
@Override
protected void onPostExecute(Bitmap bitmap) {
super.onPostExecute(bitmap);
ImageView imageView = (ImageView) mGridView.findViewWithTag(path);
if (imageView != null && bitmap != null) {
imageView.setImageBitmap(bitmap);
}
tasks.remove(this);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.io.Serializable;
import java.util.Collection;
import javax.cache.CacheException;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.TopologyValidator;
import org.apache.ignite.transactions.Transaction;
/**
* Topology validator test.
*/
public abstract class IgniteTopologyValidatorAbstractCacheTest extends IgniteCacheAbstractTest implements Serializable {
    /** key-value used at test. */
    protected static String KEY_VAL = "1";
    /** cache name 1. */
    protected static String CACHE_NAME_1 = "cache1";
    /** cache name 2. */
    protected static String CACHE_NAME_2 = "cache2";
    /** {@inheritDoc} */
    @Override protected int gridCount() {
        return 1;
    }
    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration iCfg = super.getConfiguration(igniteInstanceName);
        // Three caches: cCfg0 (default name, no validator), cache1 and cache2
        // with different topology validators attached below.
        CacheConfiguration cCfg0 = cacheConfiguration(igniteInstanceName);
        CacheConfiguration cCfg1 = cacheConfiguration(igniteInstanceName);
        cCfg1.setName(CACHE_NAME_1);
        CacheConfiguration cCfg2 = cacheConfiguration(igniteInstanceName);
        cCfg2.setName(CACHE_NAME_2);
        iCfg.setCacheConfiguration(cCfg0, cCfg1, cCfg2);
        for (CacheConfiguration cCfg : iCfg.getCacheConfiguration()) {
            // cache1 is valid only on exactly 2 nodes; cache2 on 2 or more nodes.
            if (cCfg.getName() != null)
                if (cCfg.getName().equals(CACHE_NAME_1))
                    cCfg.setTopologyValidator(new TopologyValidator() {
                        @Override public boolean validate(Collection<ClusterNode> nodes) {
                            return nodes.size() == 2;
                        }
                    });
                else if (cCfg.getName().equals(CACHE_NAME_2))
                    cCfg.setTopologyValidator(new TopologyValidator() {
                        @Override public boolean validate(Collection<ClusterNode> nodes) {
                            return nodes.size() >= 2;
                        }
                    });
        }
        return iCfg;
    }
    /**
     * Puts when topology is invalid: the put is expected to fail with a
     * "cache topology is not valid" error raised by the topology validator.
     *
     * @param cacheName cache name.
     */
    protected void putInvalid(String cacheName) {
        try {
            grid(0).cache(cacheName).put(KEY_VAL, KEY_VAL);
            assert false : "topology validation broken";
        }
        catch (CacheException ex) {
            assert ex.getCause() instanceof IgniteCheckedException &&
                ex.getCause().getMessage().contains("cache topology is not valid");
        }
    }
    /**
     * Puts when topology is valid: both the put and a subsequent read-back
     * are expected to succeed.
     *
     * @param cacheName cache name.
     */
    protected void putValid(String cacheName) {
        try {
            grid(0).cache(cacheName).put(KEY_VAL, KEY_VAL);
            assert grid(0).cache(cacheName).get(KEY_VAL).equals(KEY_VAL);
        }
        catch (CacheException ignored) {
            assert false : "topology validation broken";
        }
    }
    /**
     * Gets when topology is invalid: reads are expected to keep working even
     * while the validator rejects updates.
     *
     * @param cacheName cache name.
     */
    protected void getInvalid(String cacheName) {
        try {
            assert grid(0).cache(cacheName).get(KEY_VAL).equals(KEY_VAL);
        }
        catch (CacheException ignored) {
            assert false : "topology validation broken";
        }
    }
    /**
     * Remove when topology is invalid: the remove is expected to fail with a
     * "cache topology is not valid" error, like puts.
     *
     * @param cacheName cache name.
     */
    protected void removeInvalid(String cacheName) {
        try {
            grid(0).cache(cacheName).remove(KEY_VAL);
            assert false : "topology validation broken";
        }
        catch (CacheException ex) {
            assert ex.getCause() instanceof IgniteCheckedException &&
                ex.getCause().getMessage().contains("cache topology is not valid");
        }
    }
    /**
     * Commits with error: the commit is expected to fail with a
     * "cache topology is not valid" error.
     *
     * @param tx transaction.
     */
    protected void commitFailed(Transaction tx) {
        try {
            tx.commit();
        }
        catch (IgniteException ex) {
            assert ex.getCause() instanceof IgniteCheckedException &&
                ex.getCause().getMessage().contains("cache topology is not valid");
        }
    }
    /**
     * Removes key-value, asserting the entry existed first.
     *
     * @param cacheName cache name.
     */
    public void remove(String cacheName) {
        assert grid(0).cache(cacheName).get(KEY_VAL) != null;
        grid(0).cache(cacheName).remove(KEY_VAL);
    }
    /**
     * Asserts that cache doesn't contains key.
     *
     * @param cacheName cache name.
     */
    public void assertEmpty(String cacheName) {
        assert grid(0).cache(cacheName).get(KEY_VAL) == null;
    }
    /**
     * topology validator test: walks the cluster through 1, 2 and 3 nodes and
     * checks each cache against its validator (cache1 valid only at exactly 2
     * nodes, cache2 at 2 or more, default cache always valid).
     */
    public void testTopologyValidator() throws Exception {
        // 1 node: both validated caches reject updates.
        putValid(null);
        remove(null);
        putInvalid(CACHE_NAME_1);
        removeInvalid(CACHE_NAME_1);
        putInvalid(CACHE_NAME_2);
        removeInvalid(CACHE_NAME_2);
        startGrid(1);
        // 2 nodes: both validated caches accept updates.
        putValid(null);
        remove(null);
        putValid(CACHE_NAME_1);
        putValid(CACHE_NAME_2);
        remove(CACHE_NAME_2);
        startGrid(2);
        // 3 nodes: cache1 (exactly 2) rejects updates again but still serves
        // reads; cache2 (>= 2) keeps working.
        putValid(null);
        remove(null);
        getInvalid(CACHE_NAME_1);
        putInvalid(CACHE_NAME_1);
        removeInvalid(CACHE_NAME_1);
        putValid(CACHE_NAME_2);
        remove(CACHE_NAME_2);
    }
}
| |
package main.utils;
import java.awt.Component;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.LinkedList;
import java.util.List;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JSpinner;
import gui.utils.GUIErrorHandler;
/**
* Contains utility methods using reflection.
*
* @author Maxime PIA
*/
/**
 * Contains utility methods using reflection.
 *
 * @author Maxime PIA
 */
public class ReflectionUtils {

	/**
	 * Collects the getter methods declared by a class: zero-argument methods
	 * whose name starts with "get", or with "is" when returning boolean.
	 *
	 * @param c
	 *            The class.
	 * @return The getters of the class.
	 */
	public static List<Method> getGetters(Class<?> c) {
		List<Method> result = new LinkedList<>();
		for (Method candidate : c.getDeclaredMethods()) {
			if (candidate.getParameterTypes().length != 0) {
				continue;
			}
			boolean booleanGetter = candidate.getName().startsWith("is")
				&& candidate.getReturnType().equals(boolean.class);
			if (booleanGetter || candidate.getName().startsWith("get")) {
				result.add(candidate);
			}
		}
		return result;
	}

	/**
	 * Collects the setter methods declared by a class: single-argument
	 * methods whose name starts with "set".
	 *
	 * @param c
	 *            The class.
	 * @return The setters of the class.
	 */
	public static List<Method> getSetters(Class<?> c) {
		return getSetters(c, false);
	}

	// When flag is true, matches two-argument setters whose second parameter
	// is a boolean (flag setters); otherwise matches ordinary one-argument
	// setters.
	private static List<Method> getSetters(Class<?> c, boolean flag) {
		int expectedArity = flag ? 2 : 1;
		List<Method> result = new LinkedList<>();
		for (Method candidate : c.getDeclaredMethods()) {
			if (!candidate.getName().startsWith("set")) {
				continue;
			}
			Class<?>[] paramTypes = candidate.getParameterTypes();
			if (paramTypes.length != expectedArity) {
				continue;
			}
			if (!flag || paramTypes[1].equals(boolean.class)) {
				result.add(candidate);
			}
		}
		return result;
	}

	// Resolves the accessor of a field by converting the field name to the
	// expected accessor suffix (constants are SCREAMING_SNAKE, other fields
	// camelCase) and scanning the class's getters or setters for a name
	// ending with that suffix.
	private static Method getAccessor(Class<?> c, Field field, boolean setter,
		boolean flag) {

		int modifiers = field.getModifiers();
		boolean constant =
			Modifier.isFinal(modifiers) && Modifier.isStatic(modifiers);
		String words = constant
			? StringUtils.screamingSnakeToWords(field.getName())
			: StringUtils.camelToWords(field.getName());
		String accessorSuffix = StringUtils.wordsToScreamingCamel(words);
		for (Method candidate : setter ? getSetters(c, flag) : getGetters(c)) {
			if (candidate.getName().endsWith(accessorSuffix)) {
				return candidate;
			}
		}
		return null;
	}

	/**
	 * Finds the setter of a field.
	 *
	 * @param c
	 *            The class containing the field.
	 * @param field
	 *            The field.
	 * @return The setter of the field, or null when none matches.
	 */
	public static Method getSetter(Class<?> c, Field field) {
		return getAccessor(c, field, true, false);
	}

	/**
	 * Finds the getter of a field.
	 *
	 * @param c
	 *            The class containing the field.
	 * @param field
	 *            The field.
	 * @return The getter of the field, or null when none matches.
	 */
	public static Method getGetter(Class<?> c, Field field) {
		return getAccessor(c, field, false, false);
	}

	/**
	 * Finds the setter of a field containing flags (a two-argument setter
	 * whose second parameter is a boolean).
	 *
	 * @param c
	 *            The class containing the field.
	 * @param field
	 *            The field.
	 * @return The setter of the field, or null when none matches.
	 */
	public static Method getFlagSetter(Class<?> c, Field field) {
		return getAccessor(c, field, true, true);
	}

	/**
	 * Finds the fields annotated by a given annotation.
	 *
	 * @param c
	 *            The class containing the fields.
	 * @param annotationClass
	 *            The annotation.
	 * @return The fields annotated by the annotation.
	 */
	public static List<Field> getAnnotatedFields(Class<?> c,
		Class<? extends Annotation> annotationClass) {

		List<Field> result = new LinkedList<>();
		for (Field candidate : c.getDeclaredFields()) {
			if (candidate.getAnnotation(annotationClass) != null) {
				result.add(candidate);
			}
		}
		return result;
	}

	/**
	 * Finds the annotation objects of annotated fields.
	 *
	 * @param c
	 *            The class containing the fields.
	 * @param annotationClass
	 *            The annotation.
	 * @return The annotations of the annotated fields.
	 */
	public static <T extends Annotation> List<T> getAnnotations(Class<?> c,
		Class<T> annotationClass) {

		List<T> result = new LinkedList<>();
		for (Field candidate : c.getDeclaredFields()) {
			T annotation = candidate.getAnnotation(annotationClass);
			if (annotation != null) {
				result.add(annotation);
			}
		}
		return result;
	}

	/**
	 * Finds the fields having at least one of the given modifiers.
	 *
	 * @param c
	 *            The class containing the fields.
	 * @param modifiers
	 *            The modifiers, combined as a bit mask.
	 * @return The fields with the given modifiers.
	 */
	public static List<Field> getModifiedFields(Class<?> c, int modifiers) {
		List<Field> result = new LinkedList<>();
		for (Field candidate : c.getDeclaredFields()) {
			if ((candidate.getModifiers() & modifiers) != 0) {
				result.add(candidate);
			}
		}
		return result;
	}

	// Resolves the value accessor of a supported Swing component:
	// is/setSelected for check boxes, get/setSelectedItem for combo boxes,
	// get/setValue for spinners. Returns null for unsupported components or
	// on reflection failure (reported through GUIErrorHandler).
	private static Method getGUIComponentValueAccessor(Component elt,
		boolean setter) {

		try {
			if (elt instanceof JCheckBox) {
				return setter
					? elt.getClass().getMethod("setSelected", boolean.class)
					: elt.getClass().getMethod("isSelected");
			}
			if (elt instanceof JComboBox || elt instanceof JSpinner) {
				String methodName = (setter ? "set" : "get")
					+ (elt instanceof JComboBox ? "SelectedItem" : "Value");
				return setter
					? elt.getClass().getMethod(methodName, Object.class)
					: elt.getClass().getMethod(methodName);
			}
		} catch (NoSuchMethodException | SecurityException e) {
			new GUIErrorHandler(e);
		}
		return null;
	}

	/**
	 * Finds the getter on the value contained in a component.
	 *
	 * @param elt
	 *            The graphical component containing a value.
	 * @return The getter on the value contained in the component.
	 */
	public static Method getGUIComponentValueGetter(Component elt) {
		return getGUIComponentValueAccessor(elt, false);
	}

	/**
	 * Finds the setter on the value contained in a component.
	 *
	 * @param elt
	 *            The graphical component containing a value.
	 * @return The setter on the value contained in the component.
	 */
	public static Method getGUIComponentValueSetter(Component elt) {
		return getGUIComponentValueAccessor(elt, true);
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.easy.json;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import com.google.common.collect.Lists;
import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.exception.OutOfMemoryException;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.ops.OperatorContext;
import org.apache.drill.exec.physical.impl.OutputMutator;
import org.apache.drill.exec.store.AbstractRecordReader;
import org.apache.drill.exec.store.dfs.DrillFileSystem;
import org.apache.drill.exec.store.easy.json.JsonProcessor.ReadState;
import org.apache.drill.exec.store.easy.json.reader.CountingJsonReader;
import org.apache.drill.exec.vector.BaseValueVector;
import org.apache.drill.exec.vector.complex.fn.JsonReader;
import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter;
import org.apache.hadoop.fs.Path;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
public class JSONRecordReader extends AbstractRecordReader {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(JSONRecordReader.class);
public static final long DEFAULT_ROWS_PER_BATCH = BaseValueVector.INITIAL_VALUE_ALLOCATION;
private VectorContainerWriter writer;
// Data we're consuming
private Path hadoopPath;
private JsonNode embeddedContent;
private InputStream stream;
private final DrillFileSystem fileSystem;
private JsonProcessor jsonReader;
private int recordCount;
private long runningRecordCount = 0;
private final FragmentContext fragmentContext;
private final boolean enableAllTextMode;
private final boolean readNumbersAsDouble;
private final boolean unionEnabled;
private long parseErrorCount;
private final boolean skipMalformedJSONRecords;
private final boolean printSkippedMalformedJSONRecordLineNumber;
ReadState write = null;
/**
 * Create a JSON Record Reader that uses a file based input stream.
 * @param fragmentContext fragment context supplying session options
 * @param inputPath path of the JSON file to read
 * @param fileSystem file system used to resolve and open the input path
 * @param columns pathnames of columns/subfields to read
 * @throws OutOfMemoryException
 */
public JSONRecordReader(final FragmentContext fragmentContext, final String inputPath, final DrillFileSystem fileSystem,
    final List<SchemaPath> columns) throws OutOfMemoryException {
  // Delegate to the common constructor with no embedded JSON content.
  this(fragmentContext, inputPath, null, fileSystem, columns);
}
/**
* Create a new JSON Record Reader that uses a in memory materialized JSON stream.
* @param fragmentContext
* @param embeddedContent
* @param fileSystem
* @param columns pathnames of columns/subfields to read
* @throws OutOfMemoryException
*/
public JSONRecordReader(final FragmentContext fragmentContext, final JsonNode embeddedContent,
final DrillFileSystem fileSystem, final List<SchemaPath> columns) throws OutOfMemoryException {
this(fragmentContext, null, embeddedContent, fileSystem, columns);
}
private JSONRecordReader(final FragmentContext fragmentContext, final String inputPath,
final JsonNode embeddedContent, final DrillFileSystem fileSystem,
final List<SchemaPath> columns) {
Preconditions.checkArgument(
(inputPath == null && embeddedContent != null) ||
(inputPath != null && embeddedContent == null),
"One of inputPath or embeddedContent must be set but not both."
);
if(inputPath != null) {
this.hadoopPath = new Path(inputPath);
} else {
this.embeddedContent = embeddedContent;
}
this.fileSystem = fileSystem;
this.fragmentContext = fragmentContext;
// only enable all text mode if we aren't using embedded content mode.
this.enableAllTextMode = embeddedContent == null && fragmentContext.getOptions().getOption(ExecConstants.JSON_READER_ALL_TEXT_MODE_VALIDATOR);
this.readNumbersAsDouble = embeddedContent == null && fragmentContext.getOptions().getOption(ExecConstants.JSON_READ_NUMBERS_AS_DOUBLE_VALIDATOR);
this.unionEnabled = embeddedContent == null && fragmentContext.getOptions().getOption(ExecConstants.ENABLE_UNION_TYPE);
this.skipMalformedJSONRecords = fragmentContext.getOptions().getOption(ExecConstants.JSON_SKIP_MALFORMED_RECORDS_VALIDATOR);
this.printSkippedMalformedJSONRecordLineNumber = fragmentContext.getOptions().getOption(ExecConstants.JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG_VALIDATOR);
setColumns(columns);
}
@Override
public String toString() {
return super.toString()
+ "[hadoopPath = " + hadoopPath
+ ", recordCount = " + recordCount
+ ", parseErrorCount = " + parseErrorCount
+ ", runningRecordCount = " + runningRecordCount + ", ...]";
}
@Override
public void setup(final OperatorContext context, final OutputMutator output) throws ExecutionSetupException {
try{
if (hadoopPath != null) {
this.stream = fileSystem.openPossiblyCompressedStream(hadoopPath);
}
this.writer = new VectorContainerWriter(output, unionEnabled);
if (isSkipQuery()) {
this.jsonReader = new CountingJsonReader(fragmentContext.getManagedBuffer());
} else {
this.jsonReader = new JsonReader(fragmentContext.getManagedBuffer(), ImmutableList.copyOf(getColumns()), enableAllTextMode, true, readNumbersAsDouble);
}
setupParser();
}catch(final Exception e){
handleAndRaise("Failure reading JSON file", e);
}
}
protected List<SchemaPath> getDefaultColumnsToRead() {
return ImmutableList.of();
}
private void setupParser() throws IOException {
if(hadoopPath != null){
jsonReader.setSource(stream);
}else{
jsonReader.setSource(embeddedContent);
}
jsonReader.setIgnoreJSONParseErrors(skipMalformedJSONRecords);
}
protected void handleAndRaise(String suffix, Exception e) throws UserException {
String message = e.getMessage();
int columnNr = -1;
if (e instanceof JsonParseException) {
final JsonParseException ex = (JsonParseException) e;
message = ex.getOriginalMessage();
columnNr = ex.getLocation().getColumnNr();
}
UserException.Builder exceptionBuilder = UserException.dataReadError(e)
.message("%s - %s", suffix, message);
if (columnNr > 0) {
exceptionBuilder.pushContext("Column ", columnNr);
}
if (hadoopPath != null) {
exceptionBuilder.pushContext("Record ", currentRecordNumberInFile())
.pushContext("File ", hadoopPath.toUri().getPath());
}
throw exceptionBuilder.build(logger);
}
private long currentRecordNumberInFile() {
return runningRecordCount + recordCount + 1;
}
@Override
public int next() {
writer.allocate();
writer.reset();
recordCount = 0;
parseErrorCount = 0;
if(write == ReadState.JSON_RECORD_PARSE_EOF_ERROR){
return recordCount;
}
outside: while(recordCount < DEFAULT_ROWS_PER_BATCH){
try{
writer.setPosition(recordCount);
write = jsonReader.write(writer);
if(write == ReadState.WRITE_SUCCEED){
recordCount++;
}
else if(write == ReadState.JSON_RECORD_PARSE_ERROR || write == ReadState.JSON_RECORD_PARSE_EOF_ERROR){
if(skipMalformedJSONRecords == false){
handleAndRaise("Error parsing JSON", new Exception(hadoopPath.getName() + " : line nos :" + (recordCount+1)));
}
++parseErrorCount;
if(printSkippedMalformedJSONRecordLineNumber){
logger.debug("Error parsing JSON in " + hadoopPath.getName() + " : line nos :" + (recordCount+parseErrorCount));
}
if(write == ReadState.JSON_RECORD_PARSE_EOF_ERROR){
break outside;
}
}
else{
break outside;
}
}
catch(IOException ex)
{
handleAndRaise("Error parsing JSON", ex);
}
}
jsonReader.ensureAtLeastOneField(writer);
writer.setValueCount(recordCount);
updateRunningCount();
return recordCount;
}
private void updateRunningCount() {
runningRecordCount += recordCount;
}
@Override
public void close() throws Exception {
if(stream != null) {
stream.close();
}
}
}
| |
/*
* Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.libraries.mobiledatadownload.file.backends;
import android.accounts.Account;
import android.content.Context;
import android.net.Uri;
import android.os.Build;
import com.google.android.libraries.mobiledatadownload.file.common.MalformedUriException;
import com.google.android.libraries.mobiledatadownload.file.common.internal.LiteTransformFragments;
import com.google.android.libraries.mobiledatadownload.file.common.internal.Preconditions;
import com.google.android.libraries.mobiledatadownload.file.transforms.TransformProtos;
import com.google.common.collect.ImmutableList;
import com.google.mobiledatadownload.TransformProto;
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
/** Helper class for "android:" URIs. */
public final class AndroidUri {
  /**
   * Returns an android: scheme URI builder for package {@code packageName}. If no setter is called
   * before {@link Builder#build}, the resultant URI will point to the common internal app storage,
   * i.e. "android://<packageName>/files/common/shared/"
   *
   * @param context The android environment.
   */
  public static Builder builder(Context context) {
    return new Builder(context);
  }

  // Static utility + builder holder; never instantiated.
  private AndroidUri() {}

  // Module names are non-empty strings of [a-z] with interleaved underscores
  private static final Pattern MODULE_PATTERN = Pattern.compile("[a-z]+(_[a-z]+)*");

  // Name registered for the Android backend
  static final String SCHEME_NAME = "android";

  // URI path fragments with special meaning
  static final String FILES_LOCATION = "files";
  static final String MANAGED_LOCATION = "managed";
  static final String CACHE_LOCATION = "cache";

  // See https://developer.android.com/training/articles/direct-boot.html
  static final String DIRECT_BOOT_FILES_LOCATION = "directboot-files";
  static final String DIRECT_BOOT_CACHE_LOCATION = "directboot-cache";
  static final String EXTERNAL_LOCATION = "external";

  // The "managed" location maps to a subdirectory within /files/.
  static final String MANAGED_FILES_DIR_SUBDIRECTORY = "managed";

  static final String COMMON_MODULE = "common";
  static final Account SHARED_ACCOUNT = AccountSerialization.SHARED_ACCOUNT;

  // Module names reserved for future use or that are otherwise disallowed. Note that ImmutableSet
  // is avoided in order to avoid guava dependency.
  private static final Set<String> RESERVED_MODULES =
      Collections.unmodifiableSet(
          new HashSet<>(
              Arrays.asList(
                  "default", "unused", "special", "reserved", "shared", "virtual", "managed")));

  private static final Set<String> VALID_LOCATIONS =
      Collections.unmodifiableSet(
          new HashSet<>(
              Arrays.asList(
                  FILES_LOCATION,
                  CACHE_LOCATION,
                  MANAGED_LOCATION,
                  DIRECT_BOOT_FILES_LOCATION,
                  DIRECT_BOOT_CACHE_LOCATION,
                  EXTERNAL_LOCATION)));

  /**
   * Validates the {@code location} of an Android URI path; only the members of
   * {@code VALID_LOCATIONS} ("files", "cache", "managed", "directboot-files",
   * "directboot-cache", "external") are accepted.
   */
  static void validateLocation(String location) {
    Preconditions.checkArgument(
        VALID_LOCATIONS.contains(location),
        "The only supported locations are %s: %s",
        VALID_LOCATIONS,
        location);
  }

  /**
   * Validates the {@code module} of an Android URI path. Any non-empty string of [a-z] with
   * interleaved underscores that is not listed as reserved is valid.
   */
  static void validateModule(String module) {
    Preconditions.checkArgument(
        MODULE_PATTERN.matcher(module).matches(), "Module must match [a-z]+(_[a-z]+)*: %s", module);
    Preconditions.checkArgument(
        !RESERVED_MODULES.contains(module),
        "Module name is reserved and cannot be used: %s",
        module);
  }

  /**
   * Validates the {@code unusedRelativePath} of an Android URI path. At present time this is a
   * no-op.
   *
   * @param unusedRelativePath Not used.
   */
  static void validateRelativePath(String unusedRelativePath) {
    // No-op
  }

  /** Builder for Android Uris. */
  public static class Builder {

    // URI authority; required
    private final Context context;

    // URI path components; optional. Defaults produce
    // "android://<packageName>/files/common/shared/".
    private String packageName; // TODO: should default be ""?
    private String location = AndroidUri.FILES_LOCATION;
    private String module = AndroidUri.COMMON_MODULE;
    private Account account = AndroidUri.SHARED_ACCOUNT;
    private String relativePath = "";

    // Encoded transform specs accumulated via withTransform(); emitted as the URI fragment.
    private final ImmutableList.Builder<String> encodedSpecs = ImmutableList.builder();

    private Builder(Context context) {
      Preconditions.checkArgument(context != null, "Context cannot be null");
      this.context = context;
      this.packageName = context.getPackageName();
    }

    /**
     * Sets the package to use in the android uri AUTHORITY. Default is context.getPackageName().
     */
    public Builder setPackage(String packageName) {
      this.packageName = packageName;
      return this;
    }

    private Builder setLocation(String location) {
      AndroidUri.validateLocation(location);
      this.location = location;
      return this;
    }

    public Builder setManagedLocation() {
      return setLocation(MANAGED_LOCATION);
    }

    public Builder setExternalLocation() {
      return setLocation(EXTERNAL_LOCATION);
    }

    public Builder setDirectBootFilesLocation() {
      return setLocation(DIRECT_BOOT_FILES_LOCATION);
    }

    public Builder setDirectBootCacheLocation() {
      return setLocation(DIRECT_BOOT_CACHE_LOCATION);
    }

    /** Internal location, aka "files", is the default location. */
    public Builder setInternalLocation() {
      return setLocation(FILES_LOCATION);
    }

    public Builder setCacheLocation() {
      return setLocation(CACHE_LOCATION);
    }

    public Builder setModule(String module) {
      AndroidUri.validateModule(module);
      this.module = module;
      return this;
    }

    /**
     * Sets the account. AndroidUri.SHARED_ACCOUNT is the default, and it shows up as "shared" on
     * the filesystem.
     *
     * <p>This method performs some account validation. Android Account itself requires that both
     * the type and name fields be present. In addition to this requirement, this backend requires
     * that the type contain no colons (as these are the delimiter used internally for the account
     * serialization), and that neither the type nor the name include any slashes (as these are file
     * separators).
     *
     * <p>The account will be URL encoded in its URI representation (so, eg, "<internal>@gmail.com"
     * will appear as "you%40gmail.com"), but not in the file path representation used to access
     * disk.
     *
     * <p>Note the Linux filesystem accepts filenames composed of any bytes except "/" and NULL.
     *
     * @param account The account to set.
     * @return The fluent Builder.
     */
    public Builder setAccount(Account account) {
      AccountSerialization.serialize(account); // performs validation internally
      this.account = account;
      return this;
    }

    /**
     * Sets the component of the path after location, module and account. A single leading slash
     * will be trimmed if present.
     */
    public Builder setRelativePath(String relativePath) {
      if (relativePath.startsWith("/")) {
        relativePath = relativePath.substring(1);
      }
      AndroidUri.validateRelativePath(relativePath);
      this.relativePath = relativePath;
      return this;
    }

    /**
     * Updates builder with multiple fields from file param: location, module, account and relative
     * path. This method will fail on "managed" paths (see {@link fromFile(File, AccountManager)}).
     */
    public Builder fromFile(File file) {
      return fromAbsolutePath(file.getAbsolutePath(), /* accountManager= */ null);
    }

    /**
     * Updates builder with multiple fields from file param: location, module, account and relative
     * path. A non-null {@code accountManager} is required to handle "managed" paths.
     */
    public Builder fromFile(File file, @Nullable AccountManager accountManager) {
      return fromAbsolutePath(file.getAbsolutePath(), accountManager);
    }

    /**
     * Updates builder with multiple fields from absolute path param: location, module, account and
     * relative path. This method will fail on "managed" paths (see {@link fromAbsolutePath(String,
     * AccountManager)}).
     */
    public Builder fromAbsolutePath(String absolutePath) {
      return fromAbsolutePath(absolutePath, /* accountManager= */ null);
    }

    /**
     * Updates builder with multiple fields from absolute path param: location, module, account and
     * relative path. A non-null {@code accountManager} is required to handle "managed" paths.
     */
    // TODO(b/129467051): remove requirement for segments after 0th (logical location)
    public Builder fromAbsolutePath(String absolutePath, @Nullable AccountManager accountManager) {
      // Get the file's path within internal files, /module/account</relativePath>
      File filesDir = AndroidFileEnvironment.getFilesDirWithPreNWorkaround(context);
      String filesDirPath = filesDir.getAbsolutePath();
      String cacheDirPath = context.getCacheDir().getAbsolutePath();
      String managedDirPath = new File(filesDir, MANAGED_FILES_DIR_SUBDIRECTORY).getAbsolutePath();

      // External storage may be unavailable; stays null in that case.
      String externalDirPath = null;
      File externalFilesDir = context.getExternalFilesDir(null);
      if (externalFilesDir != null) {
        externalDirPath = externalFilesDir.getAbsolutePath();
      }

      // Direct-boot dirs only exist on N+; stay null otherwise.
      String directBootFilesPath = null;
      String directBootCachePath = null;
      if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
        // TODO(b/143610872): run after checking other dirs to minimize impact of new Context()'s
        File dpsDataDir = AndroidFileEnvironment.getDeviceProtectedDataDir(context);
        directBootFilesPath = new File(dpsDataDir, "files").getAbsolutePath();
        directBootCachePath = new File(dpsDataDir, "cache").getAbsolutePath();
      }

      // Match the path against each known root and strip the matched prefix.
      String internalPath;
      if (absolutePath.startsWith(managedDirPath)) {
        // managedDirPath must be checked before filesDirPath because filesDirPath is a prefix.
        setLocation(AndroidUri.MANAGED_LOCATION);
        internalPath = absolutePath.substring(managedDirPath.length());
      } else if (absolutePath.startsWith(filesDirPath)) {
        setLocation(AndroidUri.FILES_LOCATION);
        internalPath = absolutePath.substring(filesDirPath.length());
      } else if (absolutePath.startsWith(cacheDirPath)) {
        setLocation(AndroidUri.CACHE_LOCATION);
        internalPath = absolutePath.substring(cacheDirPath.length());
      } else if (externalDirPath != null && absolutePath.startsWith(externalDirPath)) {
        setLocation(AndroidUri.EXTERNAL_LOCATION);
        internalPath = absolutePath.substring(externalDirPath.length());
      } else if (directBootFilesPath != null && absolutePath.startsWith(directBootFilesPath)) {
        setLocation(AndroidUri.DIRECT_BOOT_FILES_LOCATION);
        internalPath = absolutePath.substring(directBootFilesPath.length());
      } else if (directBootCachePath != null && absolutePath.startsWith(directBootCachePath)) {
        setLocation(AndroidUri.DIRECT_BOOT_CACHE_LOCATION);
        internalPath = absolutePath.substring(directBootCachePath.length());
      } else {
        throw new IllegalArgumentException(
            "Path must be in app-private files dir or external files dir: " + absolutePath);
      }

      // Extract components according to android: file layout. The 0th element of split() will be
      // an empty string preceding the first character "/"
      List<String> pathFragments = Arrays.asList(internalPath.split(File.separator));
      Preconditions.checkArgument(
          pathFragments.size() >= 3,
          "Path must be in module and account subdirectories: %s",
          absolutePath);
      setModule(pathFragments.get(1));
      String accountStr = pathFragments.get(2);

      if (MANAGED_LOCATION.equals(location) && !AccountSerialization.isSharedAccount(accountStr)) {
        // Managed non-shared accounts are stored on disk as numeric ids that must be
        // resolved back to real Accounts through the AccountManager.
        int accountId;
        try {
          accountId = Integer.parseInt(accountStr);
        } catch (NumberFormatException e) {
          throw new IllegalArgumentException(e);
        }

        // Blocks on disk IO to read account table.
        // TODO(b/115940396): surface bad account as FileNotFoundException (change API signature?)
        Preconditions.checkArgument(accountManager != null, "AccountManager cannot be null");
        try {
          setAccount(accountManager.getAccount(accountId).get());
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          throw new IllegalArgumentException(new MalformedUriException(e));
        } catch (ExecutionException e) {
          throw new IllegalArgumentException(new MalformedUriException(e.getCause()));
        }
      } else {
        setAccount(AccountSerialization.deserialize(accountStr));
      }

      // Whatever follows "/<module>/<account>/" is the relative path ("+2" skips both slashes).
      setRelativePath(internalPath.substring(module.length() + accountStr.length() + 2));
      return this;
    }

    public Builder withTransform(TransformProto.Transform spec) {
      encodedSpecs.add(TransformProtos.toEncodedSpec(spec));
      return this;
    }

    // TODO(b/115940396): add MalformedUriException to signature
    public Uri build() {
      String uriPath =
          "/"
              + location
              + "/"
              + module
              + "/"
              + AccountSerialization.serialize(account)
              + "/"
              + relativePath;
      String fragment = LiteTransformFragments.joinTransformSpecs(encodedSpecs.build());
      return new Uri.Builder()
          .scheme(AndroidUri.SCHEME_NAME)
          .authority(packageName)
          .path(uriPath)
          .encodedFragment(fragment)
          .build();
    }
  }
}
| |
/*
* Copyright (c) 2016 Goldman Sachs.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Eclipse Distribution License v. 1.0 which accompany this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*/
package org.eclipse.collections.impl.map.mutable;
import java.util.Iterator;
import java.util.Optional;
import org.eclipse.collections.api.RichIterable;
import org.eclipse.collections.api.block.function.Function;
import org.eclipse.collections.api.block.function.Function0;
import org.eclipse.collections.api.block.function.Function2;
import org.eclipse.collections.api.block.function.primitive.DoubleFunction;
import org.eclipse.collections.api.block.function.primitive.FloatFunction;
import org.eclipse.collections.api.block.function.primitive.IntFunction;
import org.eclipse.collections.api.block.function.primitive.LongFunction;
import org.eclipse.collections.api.block.predicate.Predicate2;
import org.eclipse.collections.api.block.procedure.Procedure2;
import org.eclipse.collections.api.map.MutableMap;
import org.eclipse.collections.api.map.MutableMapIterable;
import org.eclipse.collections.api.map.primitive.MutableObjectDoubleMap;
import org.eclipse.collections.api.map.primitive.MutableObjectLongMap;
import org.eclipse.collections.api.tuple.Pair;
import org.eclipse.collections.impl.block.factory.PrimitiveFunctions;
import org.eclipse.collections.impl.block.procedure.MutatingAggregationProcedure;
import org.eclipse.collections.impl.block.procedure.NonMutatingAggregationProcedure;
import org.eclipse.collections.impl.map.AbstractMapIterable;
import org.eclipse.collections.impl.map.mutable.primitive.ObjectDoubleHashMap;
import org.eclipse.collections.impl.map.mutable.primitive.ObjectLongHashMap;
import org.eclipse.collections.impl.tuple.AbstractImmutableEntry;
import org.eclipse.collections.impl.utility.LazyIterate;
import org.eclipse.collections.impl.utility.MapIterate;
public abstract class AbstractMutableMapIterable<K, V> extends AbstractMapIterable<K, V> implements MutableMapIterable<K, V>
{
    @Override
    public Iterator<V> iterator()
    {
        return this.values().iterator();
    }

    @Override
    public V getIfAbsentPut(K key, Function0<? extends V> function)
    {
        V existing = this.get(key);
        if (!this.isAbsent(existing, key))
        {
            return existing;
        }
        // Absent: compute the value, store it, and hand it back.
        V created = function.value();
        this.put(key, created);
        return created;
    }

    @Override
    public V getIfAbsentPut(K key, V value)
    {
        V existing = this.get(key);
        if (!this.isAbsent(existing, key))
        {
            return existing;
        }
        // Absent: store the supplied value and return it.
        this.put(key, value);
        return value;
    }

    @Override
    public V getIfAbsentPutWithKey(K key, Function<? super K, ? extends V> function)
    {
        // The key doubles as the function argument.
        return this.getIfAbsentPutWith(key, function, key);
    }

    @Override
    public <P> V getIfAbsentPutWith(K key, Function<? super P, ? extends V> function, P parameter)
    {
        V existing = this.get(key);
        if (!this.isAbsent(existing, key))
        {
            return existing;
        }
        // Absent: derive the value from the parameter, store it, and return it.
        V created = function.valueOf(parameter);
        this.put(key, created);
        return created;
    }

    @Override
    public V updateValue(K key, Function0<? extends V> factory, Function<? super V, ? extends V> function)
    {
        // Apply the function to the current (or factory-created) value and store the result.
        V replacement = function.valueOf(this.getIfAbsent(key, factory));
        this.put(key, replacement);
        return replacement;
    }

    @Override
    public <P> V updateValueWith(K key, Function0<? extends V> factory, Function2<? super V, ? super P, ? extends V> function, P parameter)
    {
        // Two-argument variant of updateValue; the extra parameter feeds the function.
        V replacement = function.value(this.getIfAbsent(key, factory), parameter);
        this.put(key, replacement);
        return replacement;
    }

    @Override
    public <VV> MutableMapIterable<VV, V> groupByUniqueKey(Function<? super V, ? extends VV> function)
    {
        return this.groupByUniqueKey(function, UnifiedMap.newMap());
    }

    @Override
    public <K2, V2> MutableMap<K2, V2> aggregateInPlaceBy(
            Function<? super V, ? extends K2> groupBy,
            Function0<? extends V2> zeroValueFactory,
            Procedure2<? super V2, ? super V> mutatingAggregator)
    {
        // Aggregation mutates the per-group accumulator in place.
        MutableMap<K2, V2> results = UnifiedMap.newMap();
        this.forEach(new MutatingAggregationProcedure<>(results, groupBy, zeroValueFactory, mutatingAggregator));
        return results;
    }

    @Override
    public <K2, V2> MutableMap<K2, V2> aggregateBy(
            Function<? super V, ? extends K2> groupBy,
            Function0<? extends V2> zeroValueFactory,
            Function2<? super V2, ? super V, ? extends V2> nonMutatingAggregator)
    {
        // Aggregation replaces the per-group accumulator with the function result.
        MutableMap<K2, V2> results = UnifiedMap.newMap();
        this.forEach(new NonMutatingAggregationProcedure<>(results, groupBy, zeroValueFactory, nonMutatingAggregator));
        return results;
    }

    @Override
    public RichIterable<K> keysView()
    {
        return LazyIterate.adapt(this.keySet());
    }

    @Override
    public RichIterable<V> valuesView()
    {
        return LazyIterate.adapt(this.values());
    }

    @Override
    public RichIterable<Pair<K, V>> keyValuesView()
    {
        return LazyIterate.adapt(this.entrySet()).collect(AbstractImmutableEntry.getPairFunction());
    }

    @Override
    public <K2, V2> MutableMap<K2, V2> collect(Function2<? super K, ? super V, Pair<K2, V2>> function)
    {
        return MapIterate.collect(this, function, UnifiedMap.newMap(this.size()));
    }

    @Override
    public MutableMap<V, K> flipUniqueValues()
    {
        return MapIterate.flipUniqueValues(this);
    }

    @Override
    public Pair<K, V> detect(Predicate2<? super K, ? super V> predicate)
    {
        return MapIterate.detect(this, predicate);
    }

    @Override
    public Optional<Pair<K, V>> detectOptional(Predicate2<? super K, ? super V> predicate)
    {
        return MapIterate.detectOptional(this, predicate);
    }

    @Override
    public <V1> MutableObjectLongMap<V1> sumByInt(Function<? super V, ? extends V1> groupBy, IntFunction<? super V> function)
    {
        MutableObjectLongMap<V1> sums = ObjectLongHashMap.newMap();
        return this.injectInto(sums, PrimitiveFunctions.sumByIntFunction(groupBy, function));
    }

    @Override
    public <V1> MutableObjectDoubleMap<V1> sumByFloat(Function<? super V, ? extends V1> groupBy, FloatFunction<? super V> function)
    {
        MutableObjectDoubleMap<V1> sums = ObjectDoubleHashMap.newMap();
        return this.injectInto(sums, PrimitiveFunctions.sumByFloatFunction(groupBy, function));
    }

    @Override
    public <V1> MutableObjectLongMap<V1> sumByLong(Function<? super V, ? extends V1> groupBy, LongFunction<? super V> function)
    {
        MutableObjectLongMap<V1> sums = ObjectLongHashMap.newMap();
        return this.injectInto(sums, PrimitiveFunctions.sumByLongFunction(groupBy, function));
    }

    @Override
    public <V1> MutableObjectDoubleMap<V1> sumByDouble(Function<? super V, ? extends V1> groupBy, DoubleFunction<? super V> function)
    {
        MutableObjectDoubleMap<V1> sums = ObjectDoubleHashMap.newMap();
        return this.injectInto(sums, PrimitiveFunctions.sumByDoubleFunction(groupBy, function));
    }
}
| |
/* Copyright (C) 2013-2020 TU Dortmund
* This file is part of AutomataLib, http://www.automatalib.net/.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.automatalib.util.automata.conformance;
import java.util.AbstractQueue;
import java.util.Comparator;
import java.util.Iterator;
import java.util.NoSuchElementException;
import com.google.common.collect.Iterators;
import net.automatalib.commons.smartcollections.ResizingArrayStorage;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* A priority queue which enforces that no two elements that it contains are equal wrt. the specified comparator (i.e.,
* {@link Comparator#compare(Object, Object)} does not return {@code 0} for two distinct elements).
* <p>
* If an element is inserted which, according to the {@link Comparator}, is already present, the specified {@link
* MergeOperation}'s {@link MergeOperation#merge(Object, Object)} method is invoked to determine the replacement
* element.
* <p>
* The name derives from the fact that subsequent calls to {@link #extractMin()} will yield a <i>strictly</i> growing
* sequence of elements.
* <p>
* This class does not disallow {@code null} values, but the supplied {@link Comparator} has to support them.
*
* @param <E>
* element type
*
* @author Malte Isberner
*/
public class StrictPriorityQueue<E> extends AbstractQueue<E> {

    private final ResizingArrayStorage<E> storage = new ResizingArrayStorage<>(Object.class);
    private final Comparator<? super E> comparator;
    private final MergeOperation<E> mergeOp;
    // Number of live elements; storage.array may be larger (capacity > size).
    private int size;

    /**
     * Constructor.
     *
     * @param comparator
     *         the comparator used to compare elements
     * @param mergeOp
     *         the merge operation to perform for equally-ranked elements
     */
    public StrictPriorityQueue(Comparator<? super E> comparator, MergeOperation<E> mergeOp) {
        this.comparator = comparator;
        this.mergeOp = mergeOp;
    }

    /**
     * Retrieves, but does not remove the element at the head of the queue (i.e., the minimum element in the queue).
     * <p>
     * Note: Unlike {@link #peek()}, this method throws a {@link NoSuchElementException} in case of an empty priority
     * queue.
     *
     * @return the minimum element in the queue
     */
    public E peekMin() {
        if (size == 0) {
            throw new NoSuchElementException();
        }
        return storage.array[0];
    }

    @Override
    public boolean offer(E e) {
        return insert(e);
    }

    /**
     * Inserts an element into the queue.
     *
     * @param object
     *         the element to insert
     *
     * @return {@code true} if a new element has been inserted (i.e., the size has grown), {@code false} otherwise
     *         (i.e., an existing element has been replaced)
     */
    public boolean insert(E object) {
        storage.ensureCapacity(size + 1);
        storage.array[size++] = object;
        if (!upHeap()) {
            // The element was merged into an existing slot. Clear the trailing
            // copy so the backing array does not pin a stale reference.
            size--;
            storage.array[size] = null;
            return false;
        }
        return true;
    }

    /**
     * Moves the last element upwards in the heap until the heap condition is restored.
     * <p>
     * Index scheme used throughout this class: parent(i) = i/2 and children(i) = {2i, 2i+1}.
     * Under this (non-standard, but internally consistent) scheme the root at index 0 has a
     * single child at index 1.
     *
     * @return {@code true} if the element has been inserted, {@code false} if it has been merged with an existing
     *         element.
     */
    private boolean upHeap() {
        int currIdx = size - 1;
        E elem = storage.array[currIdx];
        // First pass: walk up the ancestor chain counting how many positions the
        // new element must climb; merge and bail out on an equally-ranked ancestor.
        int steps = 0;
        while (currIdx > 0) {
            int parentIdx = currIdx / 2;
            E parent = storage.array[parentIdx];
            int cmp = comparator.compare(elem, parent);
            if (cmp == 0) {
                storage.array[parentIdx] = mergeOp.merge(parent, elem);
                return false;
            } else if (cmp > 0) {
                break;
            }
            currIdx = parentIdx;
            steps++;
        }
        // Second pass: shift the displaced ancestors down and drop the element
        // into its final slot.
        currIdx = size - 1;
        for (int i = 0; i < steps; i++) {
            int parentIdx = currIdx / 2;
            storage.array[currIdx] = storage.array[parentIdx];
            currIdx = parentIdx;
        }
        storage.array[currIdx] = elem;
        return true;
    }

    @Override
    public @Nullable E poll() {
        if (size == 0) {
            return null;
        }
        return extractMin();
    }

    /**
     * Retrieves and removes the element at the head of the queue (i.e., the minimum element in the queue).
     * <p>
     * Note: Unlike {@link #poll()}, this method throws a {@link NoSuchElementException} in case of an empty priority
     * queue.
     *
     * @return the minimum element in the queue
     */
    @SuppressWarnings("nullness") // setting 'null' is fine, because we also decrease the size
    public E extractMin() {
        if (size == 0) {
            throw new NoSuchElementException();
        }
        E result = storage.array[0];
        size--;
        if (size > 0) {
            storage.array[0] = storage.array[size];
            downHeap();
        }
        // Clear the vacated slot so the reference can be garbage-collected.
        storage.array[size] = null;
        return result;
    }

    /**
     * Sifts the topmost element down into the heap until the heap condition is restored.
     * <p>
     * Uses the same index scheme as {@link #upHeap()}. At the root, {@code leftChildIdx}
     * evaluates to 0 (the element itself), so that comparison always yields 0 and the
     * root's only real child, index 1, is examined via the right-child branch.
     */
    private void downHeap() {
        E elem = storage.array[0];
        int currIdx = 0;
        while (2 * currIdx < size) {
            int leftChildIdx = 2 * currIdx;
            E leftChild = storage.array[leftChildIdx];
            if (comparator.compare(elem, leftChild) > 0) {
                storage.array[currIdx] = leftChild;
                storage.array[leftChildIdx] = elem;
                currIdx = leftChildIdx;
            } else if (2 * currIdx + 1 < size) {
                int rightChildIdx = 2 * currIdx + 1;
                E rightChild = storage.array[rightChildIdx];
                if (comparator.compare(elem, rightChild) > 0) {
                    storage.array[currIdx] = rightChild;
                    storage.array[rightChildIdx] = elem;
                    currIdx = rightChildIdx;
                } else {
                    return;
                }
            } else {
                return;
            }
        }
    }

    @Override
    public @Nullable E peek() {
        if (size == 0) {
            return null;
        }
        return storage.array[0];
    }

    /**
     * Returns an iterator over the {@code size} live elements in heap order (not sorted order).
     * <p>
     * Fix: the previous implementation iterated over the entire backing array
     * ({@code Iterators.forArray(storage.array)}), which also yielded null/stale slots
     * beyond {@link #size()} whenever capacity exceeded the element count.
     */
    @Override
    public Iterator<E> iterator() {
        return new Iterator<E>() {

            private int next;

            @Override
            public boolean hasNext() {
                return next < size;
            }

            @Override
            public E next() {
                if (next >= size) {
                    throw new NoSuchElementException();
                }
                return storage.array[next++];
            }
        };
    }

    @Override
    public int size() {
        return size;
    }

    @Override
    public boolean isEmpty() {
        return size == 0;
    }

    @Override
    public String toString() {
        if (size == 0) {
            return "[]";
        }
        StringBuilder result = new StringBuilder();
        result.append('[').append(storage.array[0]);
        for (int i = 1; i < size; i++) {
            result.append(',');
            result.append(storage.array[i]);
        }
        result.append(']');
        return result.toString();
    }

    /**
     * The merge operation two perform on two equally-ranked elements.
     *
     * @param <E>
     *         element type
     *
     * @author Malte Isberner
     */
    public interface MergeOperation<E> {

        /**
         * Merges the old element and the new element into a replacement element.
         * <p>
         * Implementations can assume that {@code cmp.compare(oldObject, newObject) == 0} holds for the comparator
         * {@code cmp} specified in {@link StrictPriorityQueue#StrictPriorityQueue(Comparator, MergeOperation)}. In
         * turn, they must guarantee that also {@code cmp.compare(result, oldObject) == 0} holds for the return value
         * {@code result}.
         *
         * @param oldObject
         *         the old element
         * @param newObject
         *         the new element
         *
         * @return the replacement element
         */
        E merge(E oldObject, E newObject);
    }
}
| |
/*
* Copyright 2014 Uwe Trottmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.battlelancer.seriesguide.thetvdbapi;
import android.content.ContentProviderOperation;
import android.content.ContentValues;
import android.content.Context;
import android.content.OperationApplicationException;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.sax.Element;
import android.sax.EndElementListener;
import android.sax.EndTextElementListener;
import android.sax.RootElement;
import android.text.TextUtils;
import android.text.format.DateUtils;
import android.util.Xml;
import com.battlelancer.seriesguide.R;
import com.battlelancer.seriesguide.dataliberation.JsonExportTask.ShowStatusExport;
import com.battlelancer.seriesguide.dataliberation.model.Show;
import com.battlelancer.seriesguide.items.SearchResult;
import com.battlelancer.seriesguide.provider.SeriesGuideContract.Episodes;
import com.battlelancer.seriesguide.provider.SeriesGuideContract.Seasons;
import com.battlelancer.seriesguide.provider.SeriesGuideContract.Shows;
import com.battlelancer.seriesguide.settings.DisplaySettings;
import com.battlelancer.seriesguide.util.DBUtils;
import com.battlelancer.seriesguide.util.ImageProvider;
import com.battlelancer.seriesguide.util.ServiceUtils;
import com.battlelancer.seriesguide.util.TimeTools;
import com.battlelancer.seriesguide.util.TraktTools;
import com.battlelancer.seriesguide.util.Utils;
import com.jakewharton.trakt.Trakt;
import com.jakewharton.trakt.entities.TvShow;
import com.jakewharton.trakt.enumerations.Extended;
import com.uwetrottmann.androidutils.AndroidUtils;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.zip.ZipInputStream;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import retrofit.RetrofitError;
import timber.log.Timber;
/**
* Provides access to the TheTVDb.com XML API throwing in some additional data from trakt.tv here
* and there.
*/
public class TheTVDB {
/** Show status constants as persisted in the local database. */
public interface ShowStatus {
int CONTINUING = 1;
int ENDED = 0;
int UNKNOWN = -1;
}
// Base URL for downloading TVDb artwork ("banners", posters, episode images).
public static final String TVDB_MIRROR_BANNERS = "http://thetvdb.com/banners/";
// Base URL of the TVDb XML API.
private static final String TVDB_API_URL = "http://thetvdb.com/api/";
/**
 * Returns true if the given show has not been updated in the last 12 hours.
 */
public static boolean isUpdateShow(Context context, int showTvdbId) {
final Cursor show = context.getContentResolver().query(Shows.buildShowUri(showTvdbId),
new String[] {
Shows._ID, Shows.LASTUPDATED
}, null, null, null
);
boolean isUpdate = false;
if (show != null) {
if (show.moveToFirst()) {
long lastUpdateTime = show.getLong(1);
if (System.currentTimeMillis() - lastUpdateTime > DateUtils.HOUR_IN_MILLIS * 12) {
isUpdate = true;
}
}
show.close();
}
return isUpdate;
}
/**
 * Adds a show and its episodes to the database. If the show already exists, does nothing.
 *
 * @return whether the show and its episodes were added
 */
public static boolean addShow(int showTvdbId, List<TvShow> seenShows,
List<TvShow> collectedShows, Context context) throws TvdbException {
boolean isShowExists = DBUtils.isShowExists(context, showTvdbId);
if (isShowExists) {
return false;
}
String language = DisplaySettings.getContentLanguage(context);
final ArrayList<ContentProviderOperation> batch = new ArrayList<>();
// get show and episode info from TVDb
Show show = fetchShow(showTvdbId, language, context);
batch.add(DBUtils.buildShowOp(show, context, true));
getEpisodesAndUpdateDatabase(context, show, language, batch);
// try to set watched and collected flags from trakt
storeTraktFlags(showTvdbId, seenShows, context, true);
storeTraktFlags(showTvdbId, collectedShows, context, false);
// calculate the next episode to display
DBUtils.updateLatestEpisode(context, showTvdbId);
return true;
}
/**
 * Updates show. Adds new, updates changed and removes orphaned episodes.
 */
public static void updateShow(Context context, int showTvdbId) throws TvdbException {
String language = DisplaySettings.getContentLanguage(context);
final ArrayList<ContentProviderOperation> batch = new ArrayList<>();
Show show = fetchShow(showTvdbId, language, context);
// false: existing show row is updated, not inserted
batch.add(DBUtils.buildShowOp(show, context, false));
getEpisodesAndUpdateDatabase(context, show, language, batch);
}
/**
 * Search for shows which include a certain keyword in their title. Dependent on the TheTVDB
 * search algorithms.
 *
 * @return a List with SearchResult objects, max 100
 */
public static List<SearchResult> searchShow(String title, Context context)
throws TvdbException {
final List<SearchResult> series = new ArrayList<SearchResult>();
final SearchResult currentShow = new SearchResult();
RootElement root = new RootElement("Data");
Element item = root.getChild("Series");
// set handlers for elements we want to react to
item.setEndElementListener(new EndElementListener() {
public void end() {
// one <Series> element finished: snapshot the accumulated fields
series.add(currentShow.copy());
}
});
item.getChild("id").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.tvdbid = Integer.valueOf(body);
}
});
item.getChild("SeriesName").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.title = body.trim();
}
});
item.getChild("Overview").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.overview = body.trim();
}
});
String language = DisplaySettings.getContentLanguage(context);
String url;
try {
url = TVDB_API_URL + "GetSeries.php?seriesname="
+ URLEncoder.encode(title, "UTF-8")
+ (language != null ? "&language=" + language : "");
} catch (UnsupportedEncodingException e) {
throw new TvdbException("Encoding show title failed", e);
}
try {
InputStream in = null;
try {
in = AndroidUtils.downloadUrl(url);
Xml.parse(in, Xml.Encoding.UTF_8, root.getContentHandler());
} finally {
if (in != null) {
in.close();
}
}
} catch (IOException | SAXException e) {
throw new TvdbException("Downloading or parsing search results failed", e);
}
return series;
}
// Values based on the assumption that sync runs about every 24 hours
private static final long UPDATE_THRESHOLD_WEEKLYS_MS = 6 * DateUtils.DAY_IN_MILLIS +
12 * DateUtils.HOUR_IN_MILLIS;
private static final long UPDATE_THRESHOLD_DAILYS_MS = 1 * DateUtils.DAY_IN_MILLIS
+ 12 * DateUtils.HOUR_IN_MILLIS;
/**
 * Return list of show TVDb ids hitting a x-day limit.
 */
public static int[] deltaUpdateShows(long currentTime, Context context) {
final List<Integer> updatableShowIds = new ArrayList<>();
// get existing show ids
final Cursor shows = context.getContentResolver().query(Shows.CONTENT_URI, new String[] {
Shows._ID, Shows.LASTUPDATED, Shows.AIRSDAYOFWEEK
}, null, null, null);
if (shows != null) {
while (shows.moveToNext()) {
boolean isDailyShow = TimeTools.getDayOfWeek(shows.getString(2))
== TimeTools.RELEASE_DAY_DAILY;
long lastUpdatedTime = shows.getLong(1);
// update daily shows more frequently than weekly shows
if (currentTime - lastUpdatedTime >
(isDailyShow ? UPDATE_THRESHOLD_DAILYS_MS : UPDATE_THRESHOLD_WEEKLYS_MS)) {
// add shows that are due for updating
updatableShowIds.add(shows.getInt(0));
}
}
long showCount = (long) shows.getCount();
Utils.trackCustomEvent(context, "Statistics", "Shows", String.valueOf(showCount));
shows.close();
}
// copy to int array
int[] showTvdbIds = new int[updatableShowIds.size()];
for (int i = 0; i < updatableShowIds.size(); i++) {
showTvdbIds[i] = updatableShowIds.get(i);
}
return showTvdbIds;
}
/**
 * Fetches episodes for the given show from TVDb, adds database ops for them. Then adds all
 * information to the database.
 */
private static boolean getEpisodesAndUpdateDatabase(Context context, Show show,
String language, final ArrayList<ContentProviderOperation> batch)
throws TvdbException {
// get ops for episodes of this show
ArrayList<ContentValues> importShowEpisodes = fetchEpisodes(batch, show, language,
context);
ContentValues[] newEpisodesValues = new ContentValues[importShowEpisodes.size()];
newEpisodesValues = importShowEpisodes.toArray(newEpisodesValues);
try {
DBUtils.applyInSmallBatches(context, batch);
} catch (OperationApplicationException e) {
throw new TvdbException("Problem applying batch operation for " + show.tvdbId, e);
}
// insert all new episodes in bulk
context.getContentResolver().bulkInsert(Episodes.CONTENT_URI, newEpisodesValues);
return true;
}
/**
 * Applies watched or collected episode flags from the trakt show matching
 * {@code showTvdbId}, if it is contained in the given list. No-op when no
 * show matches.
 */
private static void storeTraktFlags(int showTvdbId, List<TvShow> shows, Context context,
boolean isSeenFlags) {
// try to find seen episodes from trakt of the given show
for (TvShow tvShow : shows) {
if (tvShow == null || tvShow.tvdb_id == null || tvShow.tvdb_id != showTvdbId) {
// skip, does not match
continue;
}
TraktTools.applyEpisodeFlagChanges(context, tvShow,
isSeenFlags ? Episodes.WATCHED : Episodes.COLLECTED, false);
// done, found the show we were looking for
return;
}
}
/**
 * Get show details from TVDb in the user preferred language ({@link
 * DisplaySettings#getContentLanguage(android.content.Context)}).
 */
public static Show getShow(Context context, int showTvdbId) throws TvdbException {
String language = DisplaySettings.getContentLanguage(context);
return downloadAndParseShow(context, showTvdbId, language);
}
/**
 * Get show details from TVDb. Tries to fetch additional information from trakt.
 *
 * @param language A TVDb language code (see <a href="http://www.thetvdb.com/wiki/index.php/API:languages.xml"
 * >TVDb wiki</a>).
 */
private static Show fetchShow(int showTvdbId, String language, Context context)
throws TvdbException {
// get show details from TVDb
Show show = downloadAndParseShow(context, showTvdbId, language);
// get some more details from trakt
TvShow traktShow = null;
Trakt manager = ServiceUtils.getTrakt(context);
if (manager != null) {
try {
traktShow = manager.showService().summary(showTvdbId, Extended.DEFAULT);
} catch (RetrofitError e) {
Timber.e(e, "Downloading summary failed");
}
}
// NOTE(review): a null trakt manager also ends up here, making trakt a hard
// requirement for fetching a show — confirm this is intended.
if (traktShow == null) {
throw new TvdbException("Could not load show from trakt: " + showTvdbId);
}
show.airtime = TimeTools.parseShowReleaseTime(traktShow.airTime);
show.country = traktShow.country;
// try to download the show poster
if (Utils.isAllowedLargeDataConnection(context, false)) {
fetchArt(show.poster, true, context);
}
return show;
}
/**
 * Get a show from TVDb.
 */
private static Show downloadAndParseShow(final Context context, int showTvdbId, String language)
throws TvdbException {
final Show currentShow = new Show();
RootElement root = new RootElement("Data");
Element show = root.getChild("Series");
// set handlers for elements we want to react to
show.getChild("id").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
// NumberFormatException may be thrown, will stop parsing
currentShow.tvdbId = Integer.parseInt(body);
}
});
show.getChild("SeriesName").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.title = body;
}
});
show.getChild("Overview").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.overview = body;
}
});
show.getChild("Actors").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.actors = body.trim();
}
});
show.getChild("Airs_DayOfWeek").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.airday = body.trim();
}
});
show.getChild("FirstAired").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.firstAired = body;
}
});
show.getChild("Genre").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.genres = body.trim();
}
});
show.getChild("Network").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.network = body;
}
});
show.getChild("Rating").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
try {
currentShow.rating = Double.parseDouble(body);
} catch (NumberFormatException e) {
currentShow.rating = 0.0;
}
}
});
show.getChild("Runtime").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
try {
currentShow.runtime = Integer.parseInt(body);
} catch (NumberFormatException e) {
// an hour is always a good estimate...
currentShow.runtime = 60;
}
}
});
show.getChild("Status").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
// length check instead of string compare: "Continuing" has 10 chars,
// "Ended" has 5; anything else is treated as unknown
if (body.length() == 10) {
currentShow.status = ShowStatusExport.CONTINUING;
} else if (body.length() == 5) {
currentShow.status = ShowStatusExport.ENDED;
} else {
currentShow.status = ShowStatusExport.UNKNOWN;
}
}
});
show.getChild("ContentRating").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.contentRating = body;
}
});
show.getChild("poster").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.poster = body != null ? body.trim() : "";
}
});
show.getChild("IMDB_ID").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
currentShow.imdbId = body.trim();
}
});
show.getChild("lastupdated").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
try {
currentShow.lastEdited = Long.parseLong(body);
} catch (NumberFormatException e) {
currentShow.lastEdited = 0;
}
}
});
// build TVDb url, get localized content when possible
String url = TVDB_API_URL + context.getResources().getString(R.string.tvdb_apikey)
+ "/series/" + showTvdbId + "/" + (language != null ? language + ".xml" : "");
downloadAndParse(url, root.getContentHandler(), false);
return currentShow;
}
/**
 * Builds the zipped full episode listing URL for the given show and delegates
 * to {@link #parseEpisodes}. Falls back to English when no content language is set.
 */
private static ArrayList<ContentValues> fetchEpisodes(
ArrayList<ContentProviderOperation> batch, Show show, String language, Context context)
throws TvdbException {
String url = TVDB_API_URL + context.getResources().getString(R.string.tvdb_apikey)
+ "/series/" + show.tvdbId + "/all/"
+ (language != null ? language + ".zip" : "en.zip");
return parseEpisodes(batch, url, show, context);
}
/**
 * Loads the given zipped XML and parses containing episodes to create an array of {@link
 * ContentValues} for new episodes.<br> Adds update ops for updated episodes and delete ops for
 * local orphaned episodes to the given {@link ContentProviderOperation} batch.
 */
private static ArrayList<ContentValues> parseEpisodes(
final ArrayList<ContentProviderOperation> batch, String url, final Show show,
Context context) throws TvdbException {
final ArrayList<ContentValues> newEpisodesValues = new ArrayList<>();
// epoch seconds of the moment one month ago; episodes edited since then are
// always refreshed (see update decision below)
final long dateLastMonthEpoch = (System.currentTimeMillis()
- (DateUtils.DAY_IN_MILLIS * 30)) / 1000;
RootElement root = new RootElement("Data");
Element episode = root.getChild("Episode");
final HashMap<Integer, Long> localEpisodeIds = DBUtils
.getEpisodeMapForShow(context, show.tvdbId);
final HashMap<Integer, Long> removableEpisodeIds = new HashMap<>(
localEpisodeIds); // just copy episodes list, then remove valid ones
final HashSet<Integer> localSeasonIds = DBUtils.getSeasonIdsOfShow(context, show.tvdbId);
// store updated seasons to avoid duplicate ops
final HashSet<Integer> seasonIdsToUpdate = new HashSet<>();
// single ContentValues instance reused for every <Episode> element; the end
// listener snapshots it and clears it for the next element
final ContentValues values = new ContentValues();
// set handlers for elements we want to react to
episode.setEndElementListener(new EndElementListener() {
public void end() {
Integer episodeId = values.getAsInteger(Episodes._ID);
if (episodeId == null || episodeId <= 0) {
// invalid id, skip
return;
}
// don't clean up this episode
removableEpisodeIds.remove(episodeId);
// decide whether to insert or update
if (localEpisodeIds.containsKey(episodeId)) {
/*
 * Update uses provider ops which take a long time. Only
 * update if episode was edited on TVDb or is not older than
 * a month (ensures show air time changes get stored).
 */
Long lastEditEpoch = localEpisodeIds.get(episodeId);
Long lastEditEpochNew = values.getAsLong(Episodes.LAST_EDITED);
if (lastEditEpoch != null && lastEditEpochNew != null
&& (lastEditEpoch < lastEditEpochNew
|| dateLastMonthEpoch < lastEditEpoch)) {
// complete update op for episode
batch.add(DBUtils.buildEpisodeUpdateOp(values));
}
} else {
// episode does not exist, yet
newEpisodesValues.add(new ContentValues(values));
}
Integer seasonId = values.getAsInteger(Seasons.REF_SEASON_ID);
if (seasonId != null && !seasonIdsToUpdate.contains(seasonId)) {
// add insert/update op for season
batch.add(DBUtils.buildSeasonOp(values, !localSeasonIds.contains(seasonId)));
seasonIdsToUpdate.add(values.getAsInteger(Seasons.REF_SEASON_ID));
}
values.clear();
}
});
episode.getChild("id").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes._ID, body.trim());
}
});
episode.getChild("EpisodeNumber").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.NUMBER, body.trim());
}
});
episode.getChild("absolute_number").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.ABSOLUTE_NUMBER, body.trim());
}
});
episode.getChild("SeasonNumber").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.SEASON, body.trim());
}
});
episode.getChild("DVD_episodenumber").setEndTextElementListener(
new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.DVDNUMBER, body.trim());
}
}
);
episode.getChild("FirstAired").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
long episodeAirTime = TimeTools
.parseEpisodeReleaseTime(body, show.airtime, show.country);
values.put(Episodes.FIRSTAIREDMS, episodeAirTime);
values.put(Episodes.FIRSTAIRED, body.trim());
}
});
episode.getChild("EpisodeName").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.TITLE, body.trim());
}
});
episode.getChild("Overview").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.OVERVIEW, body.trim());
}
});
episode.getChild("seasonid").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Seasons.REF_SEASON_ID, body.trim());
}
});
episode.getChild("seriesid").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Shows.REF_SHOW_ID, body.trim());
}
});
episode.getChild("Director").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.DIRECTORS, body.trim());
}
});
episode.getChild("GuestStars").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.GUESTSTARS, body.trim());
}
});
episode.getChild("Writer").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.WRITERS, body.trim());
}
});
episode.getChild("Rating").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.RATING, body.trim());
}
});
episode.getChild("filename").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.IMAGE, body.trim());
}
});
episode.getChild("IMDB_ID").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
values.put(Episodes.IMDBID, body.trim());
}
});
episode.getChild("lastupdated").setEndTextElementListener(new EndTextElementListener() {
public void end(String body) {
// system populated field, trimming not necessary
try {
values.put(Episodes.LAST_EDITED, Long.valueOf(body));
} catch (NumberFormatException e) {
values.put(Episodes.LAST_EDITED, 0);
}
}
});
downloadAndParse(url, root.getContentHandler(), true);
// add delete ops for leftover episodeIds in our db
for (Integer episodeId : removableEpisodeIds.keySet()) {
batch.add(ContentProviderOperation.newDelete(Episodes.buildEpisodeUri(episodeId))
.build());
}
return newEpisodesValues;
}
/**
 * Downloads the XML or ZIP file from the given URL, passing a valid response to {@link
 * Xml#parse(InputStream, android.util.Xml.Encoding, ContentHandler)} using the given {@link
 * ContentHandler}.
 */
private static void downloadAndParse(String urlString,
ContentHandler handler, boolean isZipFile) throws TvdbException {
try {
final InputStream input = AndroidUtils.downloadUrl(urlString);
if (isZipFile) {
// We downloaded the compressed file from TheTVDB
final ZipInputStream zipin = new ZipInputStream(input);
// position the stream at the first (and only expected) zip entry
zipin.getNextEntry();
try {
Xml.parse(zipin, Xml.Encoding.UTF_8, handler);
} finally {
if (zipin != null) {
zipin.close();
}
}
} else {
try {
Xml.parse(input, Xml.Encoding.UTF_8, handler);
} finally {
if (input != null) {
input.close();
}
}
}
} catch (SAXException e) {
throw new TvdbException("Problem parsing " + urlString, e);
} catch (IOException e) {
throw new TvdbException("Problem downloading " + urlString, e);
} catch (AssertionError ae) {
// looks like Xml.parse is throwing AssertionErrors instead of IOExceptions
throw new TvdbException("Problem parsing " + urlString);
} catch (Exception e) {
throw new TvdbException("Problem downloading and parsing " + urlString, e);
}
}
/**
 * Tries to download art from the thetvdb banner TVDB_MIRROR. Ignores blank ("") or null paths
 * and skips existing images. Returns true even if there was no art downloaded.
 *
 * @param fileName of image
 * @return false if not all images could be fetched. true otherwise, even if nothing was
 * downloaded
 */
public static boolean fetchArt(String fileName, boolean isPoster, Context context) {
if (context == null || TextUtils.isEmpty(fileName)) {
return true;
}
final ImageProvider imageProvider = ImageProvider.getInstance(context);
if (!imageProvider.exists(fileName)) {
final String imageUrl;
if (isPoster) {
// the cached version is a lot smaller, but still big enough for
// our purposes
imageUrl = TVDB_MIRROR_BANNERS + "_cache/" + fileName;
} else {
imageUrl = TVDB_MIRROR_BANNERS + fileName;
}
// try to download, decode and store the image
final Bitmap bitmap = downloadBitmap(imageUrl, context);
if (bitmap != null) {
imageProvider.storeImage(fileName, bitmap, isPoster);
} else {
return false;
}
}
return true;
}
/**
 * Downloads and decodes a bitmap from the given URL. Returns {@code null} on any
 * failure or when the reported content length is outside (0, 300000] bytes.
 */
private static Bitmap downloadBitmap(String url, Context context) {
InputStream inputStream = null;
HttpURLConnection urlConnection = null;
try {
urlConnection = AndroidUtils.buildHttpUrlConnection(url);
urlConnection.connect();
long imageSize = urlConnection.getContentLength();
// allow images up to 300 kBytes (although size is always around
// 30 kBytes for posters and 100 kBytes for episode images)
if (imageSize > 300000 || imageSize < 1) {
return null;
} else {
inputStream = urlConnection.getInputStream();
// return BitmapFactory.decodeStream(inputStream);
// Bug on slow connections, fixed in future release.
try {
return BitmapFactory.decodeStream(new FlushedInputStream(inputStream));
} catch (OutOfMemoryError e) {
Timber.e(e, "Out of memory while retrieving bitmap from " + url);
}
}
} catch (IOException e) {
Timber.e(e, "I/O error retrieving bitmap from " + url);
} catch (IllegalStateException e) {
Timber.e(e, "Incorrect URL: " + url);
} catch (Exception e) {
Timber.e(e, "Error while retrieving bitmap from " + url);
} finally {
if (inputStream != null) {
try {
inputStream.close();
} catch (IOException e) {
Timber.e(e, "I/O error retrieving bitmap from " + url);
}
} else {
// NOTE(review): the connection is only disconnected when no stream was
// ever opened — presumably closing the stream is relied on to release
// the connection (keep-alive reuse). Confirm this is intentional.
if (urlConnection != null) {
urlConnection.disconnect();
}
}
}
return null;
}
/*
 * An InputStream that skips the exact number of bytes provided, unless it
 * reaches EOF.
 */
static class FlushedInputStream extends FilterInputStream {
public FlushedInputStream(InputStream inputStream) {
super(inputStream);
}
@Override
public long skip(long n) throws IOException {
long totalBytesSkipped = 0L;
while (totalBytesSkipped < n) {
long bytesSkipped = in.skip(n - totalBytesSkipped);
if (bytesSkipped == 0L) {
// skip() may legitimately return 0; fall back to reading one byte to
// distinguish "nothing skipped" from end-of-stream
int b = read();
if (b < 0) {
break; // we reached EOF
} else {
bytesSkipped = 1; // we read one byte
}
}
totalBytesSkipped += bytesSkipped;
}
return totalBytesSkipped;
}
}
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.maven.dom;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDocumentManager;
public class MavenModuleCompletionAndResolutionTest extends MavenDomWithIndicesTestCase {
/**
 * Completion inside a {@code <module>} tag offers every module of the project,
 * with paths relative to the pom being edited (including ".." navigation from a
 * nested aggregator pom).
 */
public void testCompleteFromAllAvailableModules() {
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>m1</module>" +
" <module>m2</module>" +
"</modules>");
createModulePom("m1",
"<groupId>test</groupId>" +
"<artifactId>m1</artifactId>" +
"<version>1</version>");
VirtualFile module2Pom = createModulePom("m2",
"<groupId>test</groupId>" +
"<artifactId>m2</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>m3</module>" +
"</modules>");
createModulePom("m2/m3",
"<groupId>test</groupId>" +
"<artifactId>m3</artifactId>" +
"<version>1</version>");
importProject();
assertModules("project", "m1", "m2", "m3");
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>m1</module>" +
" <module>m2</module>" +
" <module><caret></module>" +
"</modules>");
assertCompletionVariants(myProjectPom, "m1", "m2", "m2/m3");
createModulePom("m2", "<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>m3</module>" +
" <module><caret></module>" +
"</modules>");
assertCompletionVariants(module2Pom, "..", "../m1", "m3");
}
/**
 * No completion variants are offered when the imported project declares no modules.
 * (Method name typo: "Compete" presumably should read "Complete" — renaming would
 * change the test id, so it is only noted here.)
 */
public void testDoesNotCompeteIfThereIsNoModules() {
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>");
importProject();
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module><caret></module>" +
"</modules>");
assertCompletionVariants(myProjectPom);
}
/**
 * Completion also offers poms found in nested sub-directories that were created
 * after the import and are not (yet) registered as modules.
 */
public void testIncludesAllThePomsAvailable() {
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>");
importProject();
createModulePom("subDir1",
"<groupId>test</groupId>" +
"<artifactId>m1</artifactId>" +
"<version>1</version>");
createModulePom("subDir1/subDir2",
"<groupId>test</groupId>" +
"<artifactId>m2</artifactId>" +
"<version>1</version>");
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module><caret></module>" +
"</modules>");
assertCompletionVariants(myProjectPom, "subDir1", "subDir1/subDir2");
}
/**
 * A {@code <module>} value resolves to the referenced module's pom file; a value
 * that matches no module stays unresolved.
 */
public void testResolution() throws Exception {
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>m1</module>" +
" <module>m2</module>" +
"</modules>");
VirtualFile m1 = createModulePom("m1",
"<groupId>test</groupId>" +
"<artifactId>m1</artifactId>" +
"<version>1</version>");
VirtualFile m2 = createModulePom("m2",
"<groupId>test</groupId>" +
"<artifactId>m2</artifactId>" +
"<version>1</version>");
importProject();
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>m<caret>1</module>" +
" <module>m2</module>" +
"</modules>");
assertResolved(myProjectPom, findPsiFile(m1), "m1");
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>m1</module>" +
" <module>m<caret>2</module>" +
"</modules>");
assertResolved(myProjectPom, findPsiFile(m2), "m2");
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>unknown<caret>Module</module>" +
"</modules>");
assertUnresolved(myProjectPom, "unknownModule");
}
/**
 * Module paths prefixed with {@code ./} (forward slash) and {@code .\} (backslash)
 * both resolve to the same module pom.
 */
public void testResolutionWithSlashes() throws Exception {
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>./m</module>" +
"</modules>");
VirtualFile m = createModulePom("m",
"<groupId>test</groupId>" +
"<artifactId>m</artifactId>" +
"<version>1</version>");
importProject();
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>./m<caret></module>" +
"</modules>");
assertResolved(myProjectPom, findPsiFile(m), "./m");
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>.\\m<caret></module>" +
"</modules>");
assertResolved(myProjectPom, findPsiFile(m), ".\\m");
}
/**
 * With a property placeholder inside a module path: the caret on the path resolves
 * to the module pom (with the property expanded), while the caret on the property
 * name resolves to the property definition tag.
 */
public void testResolutionWithProperties() throws Exception {
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<properties>" +
" <dirName>subDir</dirName>" +
"</properties>" +
"<modules>" +
" <module>${dirName}/m</module>" +
"</modules>");
VirtualFile m = createModulePom("subDir/m",
"<groupId>test</groupId>" +
"<artifactId>m</artifactId>" +
"<version>1</version>");
importProject();
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<properties>" +
" <dirName>subDir</dirName>" +
"</properties>" +
"<modules>" +
" <module><caret>${dirName}/m</module>" +
"</modules>");
assertResolved(myProjectPom, findPsiFile(m), "subDir/m");
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<properties>" +
" <dirName>subDir</dirName>" +
"</properties>" +
"<modules>" +
" <module>${<caret>dirName}/m</module>" +
"</modules>");
assertResolved(myProjectPom, findTag(myProjectPom, "project.properties.dirName"));
}
/**
 * The create-module quick fix on an unresolved module path generates a
 * {@code pom.xml} in that directory, with the artifactId taken from the last
 * path segment and groupId/version inherited from the editing pom.
 */
public void testCreatePomQuickFix() {
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>");
importProject();
createProjectPom("<groupId>test</groupId>" +
"<artifactId>project</artifactId>" +
"<version>1</version>" +
"<packaging>pom</packaging>" +
"<modules>" +
" <module>subDir/new<caret>Module</module>" +
"</modules>");
IntentionAction i = getIntentionAtCaret(getCreateModuleIntention());
assertNotNull(i);
myFixture.launchAction(i);
assertCreateModuleFixResult(
"subDir/newModule/pom.xml",
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" +
" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n" +
" <modelVersion>4.0.0</modelVersion>\n" +
"\n" +
" <groupId>test</groupId>\n" +
" <artifactId>newModule</artifactId>\n" +
" <version>1</version>\n" +
"\n" +
" \n" +
"</project>");
}
public void testCreatePomQuickFixCustomPomFileName() {
    // Aggregator pom, imported so the project model is initialized.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>");
    importProject();

    // The module path ends in ".xml", i.e. it names a pom FILE rather than a directory.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>" +
                     "<modules>" +
                     " <module>subDir/new<caret>Module.xml</module>" +
                     "</modules>");

    IntentionAction fix = getIntentionAtCaret(getCreateModuleIntention());
    assertNotNull(fix);
    myFixture.launchAction(fix);

    // NOTE(review): the artifactId is "subDir" — apparently taken from the containing
    // directory because the path points at a file; confirm against the fix implementation.
    String expectedPom =
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
        "<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" +
        " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
        " xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n" +
        " <modelVersion>4.0.0</modelVersion>\n" +
        "\n" +
        " <groupId>test</groupId>\n" +
        " <artifactId>subDir</artifactId>\n" +
        " <version>1</version>\n" +
        "\n" +
        " \n" +
        "</project>";
    assertCreateModuleFixResult("subDir/newModule.xml", expectedPom);
}
public void testCreatePomQuickFixInDotXmlFolder() throws Exception {
    // Aggregator pom, imported so the project model is initialized.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>");
    importProject();

    // Module path looks like a pom file name, but on disk it is a DIRECTORY.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>" +
                     "<modules>" +
                     " <module>subDir/new<caret>Module.xml</module>" +
                     "</modules>");
    createProjectSubFile("subDir/newModule.xml/empty"); // ensure that "subDir/newModule.xml" exists as a directory

    IntentionAction fix = getIntentionAtCaret(getCreateModuleIntention());
    assertNotNull(fix);
    myFixture.launchAction(fix);

    // Because the path resolves to a directory, the fix creates a pom.xml inside it
    // and uses the full directory name (including ".xml") as the artifactId.
    String expectedPom =
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
        "<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" +
        " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
        " xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n" +
        " <modelVersion>4.0.0</modelVersion>\n" +
        "\n" +
        " <groupId>test</groupId>\n" +
        " <artifactId>newModule.xml</artifactId>\n" +
        " <version>1</version>\n" +
        "\n" +
        " \n" +
        "</project>";
    assertCreateModuleFixResult("subDir/newModule.xml/pom.xml", expectedPom);
}
public void testCreatePomQuickFixTakesGroupAndVersionFromSuperParent() {
    // Aggregator pom, imported so the project model is initialized.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>");
    importProject();

    // The aggregator declares no groupId/version of its own — only a <parent>.
    createProjectPom("<artifactId>project</artifactId>" +
                     "<packaging>pom</packaging>" +
                     "<parent>" +
                     " <groupId>parentGroup</groupId>" +
                     " <artifactId>parent</artifactId>" +
                     " <version>parentVersion</version>" +
                     "</parent>" +
                     "<modules>" +
                     " <module>new<caret>Module</module>" +
                     "</modules>");

    IntentionAction fix = getIntentionAtCaret(getCreateModuleIntention());
    assertNotNull(fix);
    myFixture.launchAction(fix);

    // The generated pom inherits group and version from the declared parent coordinates.
    String expectedPom =
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
        "<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" +
        " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
        " xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n" +
        " <modelVersion>4.0.0</modelVersion>\n" +
        "\n" +
        " <groupId>parentGroup</groupId>\n" +
        " <artifactId>newModule</artifactId>\n" +
        " <version>parentVersion</version>\n" +
        "\n" +
        " \n" +
        "</project>";
    assertCreateModuleFixResult("newModule/pom.xml", expectedPom);
}
public void testCreatePomQuickFixWithProperties() {
    // Aggregator pom, imported so the project model is initialized.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>");
    importProject();

    // The module path contains a property reference that must be interpolated by the fix.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<properties>" +
                     " <dirName>subDir</dirName>" +
                     "</properties>" +
                     "<modules>" +
                     " <module>${dirName}/new<caret>Module</module>" +
                     "</modules>");

    IntentionAction fix = getIntentionAtCaret(getCreateModuleIntention());
    assertNotNull(fix);
    myFixture.launchAction(fix);

    // Only the location matters here: ${dirName} must resolve to "subDir".
    VirtualFile pom = myProjectRoot.findFileByRelativePath("subDir/newModule/pom.xml");
    assertNotNull(pom);
}
public void testCreatePomQuickFixTakesDefaultGroupAndVersionIfNothingToOffer() {
    // Aggregator pom, imported so the project model is initialized.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>");
    importProject();

    // Neither own coordinates nor a <parent>: the fix has nothing to inherit from.
    createProjectPom("<artifactId>project</artifactId>" +
                     "<packaging>pom</packaging>" +
                     "<modules>" +
                     " <module>new<caret>Module</module>" +
                     "</modules>");

    IntentionAction fix = getIntentionAtCaret(getCreateModuleIntention());
    assertNotNull(fix);
    myFixture.launchAction(fix);

    // Placeholder coordinates ("groupId"/"version") are written into the new pom.
    String expectedPom =
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
        "<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" +
        " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
        " xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n" +
        " <modelVersion>4.0.0</modelVersion>\n" +
        "\n" +
        " <groupId>groupId</groupId>\n" +
        " <artifactId>newModule</artifactId>\n" +
        " <version>version</version>\n" +
        "\n" +
        " \n" +
        "</project>";
    assertCreateModuleFixResult("newModule/pom.xml", expectedPom);
}
public void testCreateModuleWithParentQuickFix() {
    // Aggregator pom, imported so the project model is initialized.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>");
    importProject();

    // Missing module directly under the aggregator.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>" +
                     "<modules>" +
                     " <module>new<caret>Module</module>" +
                     "</modules>");

    IntentionAction fix = getIntentionAtCaret(getCreateModuleWithParentIntention());
    assertNotNull(fix);
    myFixture.launchAction(fix);

    // "Create module with parent" adds a <parent> block; no <relativePath> is needed
    // because the parent pom sits one level up, which is Maven's default.
    String expectedPom =
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
        "<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" +
        " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
        " xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n" +
        " <modelVersion>4.0.0</modelVersion>\n" +
        "\n" +
        " <parent>\n" +
        " <groupId>test</groupId>\n" +
        " <artifactId>project</artifactId>\n" +
        " <version>1</version>\n" +
        " </parent>\n" +
        "\n" +
        " <groupId>test</groupId>\n" +
        " <artifactId>newModule</artifactId>\n" +
        " <version>1</version>\n" +
        "\n" +
        " \n" +
        "</project>";
    assertCreateModuleFixResult("newModule/pom.xml", expectedPom);
}
public void testCreateModuleWithParentQuickFix2() {
    // Aggregator pom, imported so the project model is initialized.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>");
    importProject();

    // Missing module nested one directory deeper than the aggregator.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>" +
                     "<modules>" +
                     " <module>ppp/new<caret>Module</module>" +
                     "</modules>");

    IntentionAction fix = getIntentionAtCaret(getCreateModuleWithParentIntention());
    assertNotNull(fix);
    myFixture.launchAction(fix);

    // Because the parent is two levels up, an explicit <relativePath>../..</relativePath>
    // must be emitted in the generated <parent> block.
    String expectedPom =
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
        "<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" +
        " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
        " xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n" +
        " <modelVersion>4.0.0</modelVersion>\n" +
        "\n" +
        " <parent>\n" +
        " <groupId>test</groupId>\n" +
        " <artifactId>project</artifactId>\n" +
        " <version>1</version>\n" +
        " <relativePath>../..</relativePath>\n" +
        " </parent>\n" +
        "\n" +
        " <groupId>test</groupId>\n" +
        " <artifactId>newModule</artifactId>\n" +
        " <version>1</version>\n" +
        "\n" +
        " \n" +
        "</project>";
    assertCreateModuleFixResult("ppp/newModule/pom.xml", expectedPom);
}
public void testCreateModuleWithParentQuickFix3() {
    // The aggregator lives in a subdirectory ("parent"), not at the project root.
    VirtualFile parentPom = createModulePom("parent",
                                            "<groupId>test</groupId>" +
                                            "<artifactId>project</artifactId>" +
                                            "<version>1</version>" +
                                            "<packaging>pom</packaging>");
    importProject(parentPom);

    // Point it at a missing module that is a SIBLING of the aggregator's directory.
    myFixture.saveText(parentPom, createPomXml(
        "<groupId>test</groupId>" +
        "<artifactId>project</artifactId>" +
        "<version>1</version>" +
        "<packaging>pom</packaging>" +
        "<modules>" +
        " <module>../ppp/new<caret>Module</module>" +
        "</modules>"));
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    IntentionAction fix = getIntentionAtCaret(parentPom, getCreateModuleWithParentIntention());
    assertNotNull(fix);
    myFixture.launchAction(fix);

    // The generated <relativePath> must walk back up and into the "parent" directory.
    String expectedPom =
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
        "<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n" +
        " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
        " xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n" +
        " <modelVersion>4.0.0</modelVersion>\n" +
        "\n" +
        " <parent>\n" +
        " <groupId>test</groupId>\n" +
        " <artifactId>project</artifactId>\n" +
        " <version>1</version>\n" +
        " <relativePath>../../parent</relativePath>\n" +
        " </parent>\n" +
        "\n" +
        " <groupId>test</groupId>\n" +
        " <artifactId>newModule</artifactId>\n" +
        " <version>1</version>\n" +
        "\n" +
        " \n" +
        "</project>";
    assertCreateModuleFixResult("ppp/newModule/pom.xml", expectedPom);
}
public void testDoesNotShowCreatePomQuickFixForEmptyModuleTag() {
    // Aggregator pom, imported so the project model is initialized.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>");
    importProject();

    // Caret inside an EMPTY <module> tag: there is no path to create a module for,
    // so the quick fix must not be offered.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>" +
                     "<modules>" +
                     " <module><caret></module>" +
                     "</modules>");

    assertNull(getIntentionAtCaret(getCreateModuleIntention()));
}
public void testDoesNotShowCreatePomQuickFixExistingModule() {
    // Aggregator that already lists the module...
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>" +
                     "<modules>" +
                     " <module>module</module>" +
                     "</modules>");
    // ...and the module's pom actually exists on disk.
    createModulePom("module",
                    "<groupId>test</groupId>" +
                    "<artifactId>module</artifactId>" +
                    "<version>1</version>");
    importProject();

    // With the caret on a module reference that resolves, the fix must not be offered.
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +
                     "<packaging>pom</packaging>" +
                     "<modules>" +
                     " <module>m<caret>odule</module>" +
                     "</modules>");

    assertNull(getIntentionAtCaret(getCreateModuleIntention()));
}
/**
 * Verifies the outcome of a "create module" quick fix: the pom exists at
 * {@code relativePath}, it is open in the currently selected editor, and its
 * content equals {@code expectedText}.
 */
private void assertCreateModuleFixResult(String relativePath, String expectedText) {
    VirtualFile pom = myProjectRoot.findFileByRelativePath(relativePath);
    assertNotNull("pom was not created at " + relativePath, pom);

    // Guard against NPEs so a failure reports WHAT is missing instead of crashing:
    // getDocument() may return null for unloadable files, and no editor may be selected.
    Document doc = FileDocumentManager.getInstance().getDocument(pom);
    assertNotNull("no document for " + relativePath, doc);

    Editor selectedEditor = FileEditorManager.getInstance(myProject).getSelectedTextEditor();
    assertNotNull("no editor is selected after applying the fix", selectedEditor);

    // The fix is expected to open the freshly created pom in the editor.
    assertEquals(doc, selectedEditor.getDocument());
    assertEquals(expectedText, doc.getText());
}
/** Display name of the "create module" quick fix, as shown in the intention list. */
private static String getCreateModuleIntention() {
    return MavenDomBundle.message("fix.create.module");
}
/** Display name of the "create module with parent" quick fix, as shown in the intention list. */
private static String getCreateModuleWithParentIntention() {
    return MavenDomBundle.message("fix.create.module.with.parent");
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cloudfront.model.transform;
import static com.amazonaws.util.StringUtils.UTF8;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.amazonaws.AmazonClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.cloudfront.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringInputStream;
import com.amazonaws.util.StringUtils;
import com.amazonaws.util.IdempotentUtils;
import com.amazonaws.util.XMLWriter;
/**
* CreateDistributionRequest Marshaller
*/
public class CreateDistributionRequestMarshaller
implements
Marshaller<Request<CreateDistributionRequest>, CreateDistributionRequest> {
public Request<CreateDistributionRequest> marshall(
CreateDistributionRequest createDistributionRequest) {
if (createDistributionRequest == null) {
throw new AmazonClientException(
"Invalid argument passed to marshall(...)");
}
Request<CreateDistributionRequest> request = new DefaultRequest<CreateDistributionRequest>(
createDistributionRequest, "AmazonCloudFront");
request.setHttpMethod(HttpMethodName.POST);
String uriResourcePath = "/2016-01-28/distribution";
request.setResourcePath(uriResourcePath);
try {
StringWriter stringWriter = new StringWriter();
XMLWriter xmlWriter = new XMLWriter(stringWriter,
"http://cloudfront.amazonaws.com/doc/2016-01-28/");
DistributionConfig distributionConfig = createDistributionRequest
.getDistributionConfig();
if (distributionConfig != null) {
xmlWriter.startElement("DistributionConfig");
if (distributionConfig.getCallerReference() != null) {
xmlWriter.startElement("CallerReference")
.value(distributionConfig.getCallerReference())
.endElement();
}
Aliases aliases = distributionConfig.getAliases();
if (aliases != null) {
xmlWriter.startElement("Aliases");
if (aliases.getQuantity() != null) {
xmlWriter.startElement("Quantity")
.value(aliases.getQuantity()).endElement();
}
com.amazonaws.internal.SdkInternalList<String> aliasesItemsList = (com.amazonaws.internal.SdkInternalList<String>) aliases
.getItems();
if (!aliasesItemsList.isEmpty()
|| !aliasesItemsList.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String aliasesItemsListValue : aliasesItemsList) {
xmlWriter.startElement("CNAME");
xmlWriter.value(aliasesItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
if (distributionConfig.getDefaultRootObject() != null) {
xmlWriter.startElement("DefaultRootObject")
.value(distributionConfig.getDefaultRootObject())
.endElement();
}
Origins origins = distributionConfig.getOrigins();
if (origins != null) {
xmlWriter.startElement("Origins");
if (origins.getQuantity() != null) {
xmlWriter.startElement("Quantity")
.value(origins.getQuantity()).endElement();
}
com.amazonaws.internal.SdkInternalList<Origin> originsItemsList = (com.amazonaws.internal.SdkInternalList<Origin>) origins
.getItems();
if (!originsItemsList.isEmpty()
|| !originsItemsList.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (Origin originsItemsListValue : originsItemsList) {
xmlWriter.startElement("Origin");
if (originsItemsListValue.getId() != null) {
xmlWriter.startElement("Id")
.value(originsItemsListValue.getId())
.endElement();
}
if (originsItemsListValue.getDomainName() != null) {
xmlWriter
.startElement("DomainName")
.value(originsItemsListValue
.getDomainName()).endElement();
}
if (originsItemsListValue.getOriginPath() != null) {
xmlWriter
.startElement("OriginPath")
.value(originsItemsListValue
.getOriginPath()).endElement();
}
CustomHeaders customHeaders = originsItemsListValue
.getCustomHeaders();
if (customHeaders != null) {
xmlWriter.startElement("CustomHeaders");
if (customHeaders.getQuantity() != null) {
xmlWriter.startElement("Quantity")
.value(customHeaders.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<OriginCustomHeader> customHeadersItemsList = (com.amazonaws.internal.SdkInternalList<OriginCustomHeader>) customHeaders
.getItems();
if (!customHeadersItemsList.isEmpty()
|| !customHeadersItemsList
.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (OriginCustomHeader customHeadersItemsListValue : customHeadersItemsList) {
xmlWriter
.startElement("OriginCustomHeader");
if (customHeadersItemsListValue
.getHeaderName() != null) {
xmlWriter
.startElement("HeaderName")
.value(customHeadersItemsListValue
.getHeaderName())
.endElement();
}
if (customHeadersItemsListValue
.getHeaderValue() != null) {
xmlWriter
.startElement("HeaderValue")
.value(customHeadersItemsListValue
.getHeaderValue())
.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
S3OriginConfig s3OriginConfig = originsItemsListValue
.getS3OriginConfig();
if (s3OriginConfig != null) {
xmlWriter.startElement("S3OriginConfig");
if (s3OriginConfig.getOriginAccessIdentity() != null) {
xmlWriter
.startElement(
"OriginAccessIdentity")
.value(s3OriginConfig
.getOriginAccessIdentity())
.endElement();
}
xmlWriter.endElement();
}
CustomOriginConfig customOriginConfig = originsItemsListValue
.getCustomOriginConfig();
if (customOriginConfig != null) {
xmlWriter.startElement("CustomOriginConfig");
if (customOriginConfig.getHTTPPort() != null) {
xmlWriter
.startElement("HTTPPort")
.value(customOriginConfig
.getHTTPPort())
.endElement();
}
if (customOriginConfig.getHTTPSPort() != null) {
xmlWriter
.startElement("HTTPSPort")
.value(customOriginConfig
.getHTTPSPort())
.endElement();
}
if (customOriginConfig
.getOriginProtocolPolicy() != null) {
xmlWriter
.startElement(
"OriginProtocolPolicy")
.value(customOriginConfig
.getOriginProtocolPolicy())
.endElement();
}
OriginSslProtocols originSslProtocols = customOriginConfig
.getOriginSslProtocols();
if (originSslProtocols != null) {
xmlWriter
.startElement("OriginSslProtocols");
if (originSslProtocols.getQuantity() != null) {
xmlWriter
.startElement("Quantity")
.value(originSslProtocols
.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> originSslProtocolsItemsList = (com.amazonaws.internal.SdkInternalList<String>) originSslProtocols
.getItems();
if (!originSslProtocolsItemsList.isEmpty()
|| !originSslProtocolsItemsList
.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String originSslProtocolsItemsListValue : originSslProtocolsItemsList) {
xmlWriter
.startElement("SslProtocol");
xmlWriter
.value(originSslProtocolsItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
DefaultCacheBehavior defaultCacheBehavior = distributionConfig
.getDefaultCacheBehavior();
if (defaultCacheBehavior != null) {
xmlWriter.startElement("DefaultCacheBehavior");
if (defaultCacheBehavior.getTargetOriginId() != null) {
xmlWriter
.startElement("TargetOriginId")
.value(defaultCacheBehavior.getTargetOriginId())
.endElement();
}
ForwardedValues forwardedValues = defaultCacheBehavior
.getForwardedValues();
if (forwardedValues != null) {
xmlWriter.startElement("ForwardedValues");
if (forwardedValues.getQueryString() != null) {
xmlWriter.startElement("QueryString")
.value(forwardedValues.getQueryString())
.endElement();
}
CookiePreference cookies = forwardedValues.getCookies();
if (cookies != null) {
xmlWriter.startElement("Cookies");
if (cookies.getForward() != null) {
xmlWriter.startElement("Forward")
.value(cookies.getForward())
.endElement();
}
CookieNames whitelistedNames = cookies
.getWhitelistedNames();
if (whitelistedNames != null) {
xmlWriter.startElement("WhitelistedNames");
if (whitelistedNames.getQuantity() != null) {
xmlWriter
.startElement("Quantity")
.value(whitelistedNames
.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> cookieNamesItemsList = (com.amazonaws.internal.SdkInternalList<String>) whitelistedNames
.getItems();
if (!cookieNamesItemsList.isEmpty()
|| !cookieNamesItemsList
.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String cookieNamesItemsListValue : cookieNamesItemsList) {
xmlWriter.startElement("Name");
xmlWriter
.value(cookieNamesItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
Headers headers = forwardedValues.getHeaders();
if (headers != null) {
xmlWriter.startElement("Headers");
if (headers.getQuantity() != null) {
xmlWriter.startElement("Quantity")
.value(headers.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> headersItemsList = (com.amazonaws.internal.SdkInternalList<String>) headers
.getItems();
if (!headersItemsList.isEmpty()
|| !headersItemsList.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String headersItemsListValue : headersItemsList) {
xmlWriter.startElement("Name");
xmlWriter.value(headersItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
TrustedSigners trustedSigners = defaultCacheBehavior
.getTrustedSigners();
if (trustedSigners != null) {
xmlWriter.startElement("TrustedSigners");
if (trustedSigners.getEnabled() != null) {
xmlWriter.startElement("Enabled")
.value(trustedSigners.getEnabled())
.endElement();
}
if (trustedSigners.getQuantity() != null) {
xmlWriter.startElement("Quantity")
.value(trustedSigners.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> trustedSignersItemsList = (com.amazonaws.internal.SdkInternalList<String>) trustedSigners
.getItems();
if (!trustedSignersItemsList.isEmpty()
|| !trustedSignersItemsList.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String trustedSignersItemsListValue : trustedSignersItemsList) {
xmlWriter.startElement("AwsAccountNumber");
xmlWriter.value(trustedSignersItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
if (defaultCacheBehavior.getViewerProtocolPolicy() != null) {
xmlWriter
.startElement("ViewerProtocolPolicy")
.value(defaultCacheBehavior
.getViewerProtocolPolicy())
.endElement();
}
if (defaultCacheBehavior.getMinTTL() != null) {
xmlWriter.startElement("MinTTL")
.value(defaultCacheBehavior.getMinTTL())
.endElement();
}
AllowedMethods allowedMethods = defaultCacheBehavior
.getAllowedMethods();
if (allowedMethods != null) {
xmlWriter.startElement("AllowedMethods");
if (allowedMethods.getQuantity() != null) {
xmlWriter.startElement("Quantity")
.value(allowedMethods.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> allowedMethodsItemsList = (com.amazonaws.internal.SdkInternalList<String>) allowedMethods
.getItems();
if (!allowedMethodsItemsList.isEmpty()
|| !allowedMethodsItemsList.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String allowedMethodsItemsListValue : allowedMethodsItemsList) {
xmlWriter.startElement("Method");
xmlWriter.value(allowedMethodsItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
CachedMethods cachedMethods = allowedMethods
.getCachedMethods();
if (cachedMethods != null) {
xmlWriter.startElement("CachedMethods");
if (cachedMethods.getQuantity() != null) {
xmlWriter.startElement("Quantity")
.value(cachedMethods.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> cachedMethodsItemsList = (com.amazonaws.internal.SdkInternalList<String>) cachedMethods
.getItems();
if (!cachedMethodsItemsList.isEmpty()
|| !cachedMethodsItemsList
.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String cachedMethodsItemsListValue : cachedMethodsItemsList) {
xmlWriter.startElement("Method");
xmlWriter
.value(cachedMethodsItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
if (defaultCacheBehavior.getSmoothStreaming() != null) {
xmlWriter
.startElement("SmoothStreaming")
.value(defaultCacheBehavior
.getSmoothStreaming()).endElement();
}
if (defaultCacheBehavior.getDefaultTTL() != null) {
xmlWriter.startElement("DefaultTTL")
.value(defaultCacheBehavior.getDefaultTTL())
.endElement();
}
if (defaultCacheBehavior.getMaxTTL() != null) {
xmlWriter.startElement("MaxTTL")
.value(defaultCacheBehavior.getMaxTTL())
.endElement();
}
if (defaultCacheBehavior.getCompress() != null) {
xmlWriter.startElement("Compress")
.value(defaultCacheBehavior.getCompress())
.endElement();
}
xmlWriter.endElement();
}
CacheBehaviors cacheBehaviors = distributionConfig
.getCacheBehaviors();
if (cacheBehaviors != null) {
xmlWriter.startElement("CacheBehaviors");
if (cacheBehaviors.getQuantity() != null) {
xmlWriter.startElement("Quantity")
.value(cacheBehaviors.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<CacheBehavior> cacheBehaviorsItemsList = (com.amazonaws.internal.SdkInternalList<CacheBehavior>) cacheBehaviors
.getItems();
if (!cacheBehaviorsItemsList.isEmpty()
|| !cacheBehaviorsItemsList.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (CacheBehavior cacheBehaviorsItemsListValue : cacheBehaviorsItemsList) {
xmlWriter.startElement("CacheBehavior");
if (cacheBehaviorsItemsListValue.getPathPattern() != null) {
xmlWriter
.startElement("PathPattern")
.value(cacheBehaviorsItemsListValue
.getPathPattern()).endElement();
}
if (cacheBehaviorsItemsListValue
.getTargetOriginId() != null) {
xmlWriter
.startElement("TargetOriginId")
.value(cacheBehaviorsItemsListValue
.getTargetOriginId())
.endElement();
}
ForwardedValues forwardedValues = cacheBehaviorsItemsListValue
.getForwardedValues();
if (forwardedValues != null) {
xmlWriter.startElement("ForwardedValues");
if (forwardedValues.getQueryString() != null) {
xmlWriter
.startElement("QueryString")
.value(forwardedValues
.getQueryString())
.endElement();
}
CookiePreference cookies = forwardedValues
.getCookies();
if (cookies != null) {
xmlWriter.startElement("Cookies");
if (cookies.getForward() != null) {
xmlWriter.startElement("Forward")
.value(cookies.getForward())
.endElement();
}
CookieNames whitelistedNames = cookies
.getWhitelistedNames();
if (whitelistedNames != null) {
xmlWriter
.startElement("WhitelistedNames");
if (whitelistedNames.getQuantity() != null) {
xmlWriter
.startElement("Quantity")
.value(whitelistedNames
.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> cookieNamesItemsList = (com.amazonaws.internal.SdkInternalList<String>) whitelistedNames
.getItems();
if (!cookieNamesItemsList.isEmpty()
|| !cookieNamesItemsList
.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String cookieNamesItemsListValue : cookieNamesItemsList) {
xmlWriter.startElement("Name");
xmlWriter
.value(cookieNamesItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
Headers headers = forwardedValues.getHeaders();
if (headers != null) {
xmlWriter.startElement("Headers");
if (headers.getQuantity() != null) {
xmlWriter.startElement("Quantity")
.value(headers.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> headersItemsList = (com.amazonaws.internal.SdkInternalList<String>) headers
.getItems();
if (!headersItemsList.isEmpty()
|| !headersItemsList
.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String headersItemsListValue : headersItemsList) {
xmlWriter.startElement("Name");
xmlWriter
.value(headersItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
TrustedSigners trustedSigners = cacheBehaviorsItemsListValue
.getTrustedSigners();
if (trustedSigners != null) {
xmlWriter.startElement("TrustedSigners");
if (trustedSigners.getEnabled() != null) {
xmlWriter.startElement("Enabled")
.value(trustedSigners.getEnabled())
.endElement();
}
if (trustedSigners.getQuantity() != null) {
xmlWriter
.startElement("Quantity")
.value(trustedSigners.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> trustedSignersItemsList = (com.amazonaws.internal.SdkInternalList<String>) trustedSigners
.getItems();
if (!trustedSignersItemsList.isEmpty()
|| !trustedSignersItemsList
.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String trustedSignersItemsListValue : trustedSignersItemsList) {
xmlWriter
.startElement("AwsAccountNumber");
xmlWriter
.value(trustedSignersItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
if (cacheBehaviorsItemsListValue
.getViewerProtocolPolicy() != null) {
xmlWriter
.startElement("ViewerProtocolPolicy")
.value(cacheBehaviorsItemsListValue
.getViewerProtocolPolicy())
.endElement();
}
if (cacheBehaviorsItemsListValue.getMinTTL() != null) {
xmlWriter
.startElement("MinTTL")
.value(cacheBehaviorsItemsListValue
.getMinTTL()).endElement();
}
AllowedMethods allowedMethods = cacheBehaviorsItemsListValue
.getAllowedMethods();
if (allowedMethods != null) {
xmlWriter.startElement("AllowedMethods");
if (allowedMethods.getQuantity() != null) {
xmlWriter
.startElement("Quantity")
.value(allowedMethods.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> allowedMethodsItemsList = (com.amazonaws.internal.SdkInternalList<String>) allowedMethods
.getItems();
if (!allowedMethodsItemsList.isEmpty()
|| !allowedMethodsItemsList
.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String allowedMethodsItemsListValue : allowedMethodsItemsList) {
xmlWriter.startElement("Method");
xmlWriter
.value(allowedMethodsItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
CachedMethods cachedMethods = allowedMethods
.getCachedMethods();
if (cachedMethods != null) {
xmlWriter.startElement("CachedMethods");
if (cachedMethods.getQuantity() != null) {
xmlWriter
.startElement("Quantity")
.value(cachedMethods
.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> cachedMethodsItemsList = (com.amazonaws.internal.SdkInternalList<String>) cachedMethods
.getItems();
if (!cachedMethodsItemsList.isEmpty()
|| !cachedMethodsItemsList
.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String cachedMethodsItemsListValue : cachedMethodsItemsList) {
xmlWriter.startElement("Method");
xmlWriter
.value(cachedMethodsItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
if (cacheBehaviorsItemsListValue
.getSmoothStreaming() != null) {
xmlWriter
.startElement("SmoothStreaming")
.value(cacheBehaviorsItemsListValue
.getSmoothStreaming())
.endElement();
}
if (cacheBehaviorsItemsListValue.getDefaultTTL() != null) {
xmlWriter
.startElement("DefaultTTL")
.value(cacheBehaviorsItemsListValue
.getDefaultTTL()).endElement();
}
if (cacheBehaviorsItemsListValue.getMaxTTL() != null) {
xmlWriter
.startElement("MaxTTL")
.value(cacheBehaviorsItemsListValue
.getMaxTTL()).endElement();
}
if (cacheBehaviorsItemsListValue.getCompress() != null) {
xmlWriter
.startElement("Compress")
.value(cacheBehaviorsItemsListValue
.getCompress()).endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
CustomErrorResponses customErrorResponses = distributionConfig
.getCustomErrorResponses();
if (customErrorResponses != null) {
xmlWriter.startElement("CustomErrorResponses");
if (customErrorResponses.getQuantity() != null) {
xmlWriter.startElement("Quantity")
.value(customErrorResponses.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<CustomErrorResponse> customErrorResponsesItemsList = (com.amazonaws.internal.SdkInternalList<CustomErrorResponse>) customErrorResponses
.getItems();
if (!customErrorResponsesItemsList.isEmpty()
|| !customErrorResponsesItemsList.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (CustomErrorResponse customErrorResponsesItemsListValue : customErrorResponsesItemsList) {
xmlWriter.startElement("CustomErrorResponse");
if (customErrorResponsesItemsListValue
.getErrorCode() != null) {
xmlWriter
.startElement("ErrorCode")
.value(customErrorResponsesItemsListValue
.getErrorCode()).endElement();
}
if (customErrorResponsesItemsListValue
.getResponsePagePath() != null) {
xmlWriter
.startElement("ResponsePagePath")
.value(customErrorResponsesItemsListValue
.getResponsePagePath())
.endElement();
}
if (customErrorResponsesItemsListValue
.getResponseCode() != null) {
xmlWriter
.startElement("ResponseCode")
.value(customErrorResponsesItemsListValue
.getResponseCode())
.endElement();
}
if (customErrorResponsesItemsListValue
.getErrorCachingMinTTL() != null) {
xmlWriter
.startElement("ErrorCachingMinTTL")
.value(customErrorResponsesItemsListValue
.getErrorCachingMinTTL())
.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
if (distributionConfig.getComment() != null) {
xmlWriter.startElement("Comment")
.value(distributionConfig.getComment())
.endElement();
}
LoggingConfig logging = distributionConfig.getLogging();
if (logging != null) {
xmlWriter.startElement("Logging");
if (logging.getEnabled() != null) {
xmlWriter.startElement("Enabled")
.value(logging.getEnabled()).endElement();
}
if (logging.getIncludeCookies() != null) {
xmlWriter.startElement("IncludeCookies")
.value(logging.getIncludeCookies())
.endElement();
}
if (logging.getBucket() != null) {
xmlWriter.startElement("Bucket")
.value(logging.getBucket()).endElement();
}
if (logging.getPrefix() != null) {
xmlWriter.startElement("Prefix")
.value(logging.getPrefix()).endElement();
}
xmlWriter.endElement();
}
if (distributionConfig.getPriceClass() != null) {
xmlWriter.startElement("PriceClass")
.value(distributionConfig.getPriceClass())
.endElement();
}
if (distributionConfig.getEnabled() != null) {
xmlWriter.startElement("Enabled")
.value(distributionConfig.getEnabled())
.endElement();
}
ViewerCertificate viewerCertificate = distributionConfig
.getViewerCertificate();
if (viewerCertificate != null) {
xmlWriter.startElement("ViewerCertificate");
if (viewerCertificate.getCloudFrontDefaultCertificate() != null) {
xmlWriter
.startElement("CloudFrontDefaultCertificate")
.value(viewerCertificate
.getCloudFrontDefaultCertificate())
.endElement();
}
if (viewerCertificate.getIAMCertificateId() != null) {
xmlWriter.startElement("IAMCertificateId")
.value(viewerCertificate.getIAMCertificateId())
.endElement();
}
if (viewerCertificate.getACMCertificateArn() != null) {
xmlWriter
.startElement("ACMCertificateArn")
.value(viewerCertificate.getACMCertificateArn())
.endElement();
}
if (viewerCertificate.getSSLSupportMethod() != null) {
xmlWriter.startElement("SSLSupportMethod")
.value(viewerCertificate.getSSLSupportMethod())
.endElement();
}
if (viewerCertificate.getMinimumProtocolVersion() != null) {
xmlWriter
.startElement("MinimumProtocolVersion")
.value(viewerCertificate
.getMinimumProtocolVersion())
.endElement();
}
if (viewerCertificate.getCertificate() != null) {
xmlWriter.startElement("Certificate")
.value(viewerCertificate.getCertificate())
.endElement();
}
if (viewerCertificate.getCertificateSource() != null) {
xmlWriter
.startElement("CertificateSource")
.value(viewerCertificate.getCertificateSource())
.endElement();
}
xmlWriter.endElement();
}
Restrictions restrictions = distributionConfig
.getRestrictions();
if (restrictions != null) {
xmlWriter.startElement("Restrictions");
GeoRestriction geoRestriction = restrictions
.getGeoRestriction();
if (geoRestriction != null) {
xmlWriter.startElement("GeoRestriction");
if (geoRestriction.getRestrictionType() != null) {
xmlWriter.startElement("RestrictionType")
.value(geoRestriction.getRestrictionType())
.endElement();
}
if (geoRestriction.getQuantity() != null) {
xmlWriter.startElement("Quantity")
.value(geoRestriction.getQuantity())
.endElement();
}
com.amazonaws.internal.SdkInternalList<String> geoRestrictionItemsList = (com.amazonaws.internal.SdkInternalList<String>) geoRestriction
.getItems();
if (!geoRestrictionItemsList.isEmpty()
|| !geoRestrictionItemsList.isAutoConstruct()) {
xmlWriter.startElement("Items");
for (String geoRestrictionItemsListValue : geoRestrictionItemsList) {
xmlWriter.startElement("Location");
xmlWriter.value(geoRestrictionItemsListValue);
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
xmlWriter.endElement();
}
if (distributionConfig.getWebACLId() != null) {
xmlWriter.startElement("WebACLId")
.value(distributionConfig.getWebACLId())
.endElement();
}
xmlWriter.endElement();
}
request.setContent(new StringInputStream(stringWriter.getBuffer()
.toString()));
request.addHeader(
"Content-Length",
Integer.toString(stringWriter.getBuffer().toString()
.getBytes(UTF8).length));
if (!request.getHeaders().containsKey("Content-Type")) {
request.addHeader("Content-Type", "application/xml");
}
} catch (Throwable t) {
throw new AmazonClientException(
"Unable to marshall request to XML: " + t.getMessage(), t);
}
return request;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import com.google.common.collect.Sets;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.MetaTableAccessor;
import org.apache.hadoop.hbase.TableDescriptor;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.TableState;
/**
* This is a helper class used to manage table states.
* States persisted in tableinfo and cached internally.
*/
@InterfaceAudience.Private
public class TableStateManager {
  private static final Log LOG = LogFactory.getLog(TableStateManager.class);

  /**
   * Guards read-check-update sequences on table state so that two concurrent
   * callers cannot interleave their checks and updates.
   */
  private final ReadWriteLock lock = new ReentrantReadWriteLock();
  private final MasterServices master;

  public TableStateManager(MasterServices master) {
    this.master = master;
  }

  /**
   * Set table state to provided.
   * Caller should lock table on write.
   * @param tableName table to change state for
   * @param newState new state
   * @throws IOException if updating the state in meta fails
   */
  public void setTableState(TableName tableName, TableState.State newState) throws IOException {
    lock.writeLock().lock();
    try {
      udpateMetaState(tableName, newState);
    } finally {
      lock.writeLock().unlock();
    }
  }

  /**
   * Set table state to provided but only if table in specified states.
   * Caller should lock table on write.
   * @param tableName table to change state for
   * @param newState new state
   * @param states states to check against
   * @return true if the state was changed, false if the table was not in {@code states}
   * @throws IOException if reading or updating the state in meta fails
   * @throws TableNotFoundException if no state for the table is found in meta
   */
  public boolean setTableStateIfInStates(TableName tableName,
                                         TableState.State newState,
                                         TableState.State... states)
          throws IOException {
    lock.writeLock().lock();
    try {
      TableState currentState = readMetaState(tableName);
      if (currentState == null) {
        throw new TableNotFoundException(tableName);
      }
      if (currentState.inStates(states)) {
        udpateMetaState(tableName, newState);
        return true;
      } else {
        return false;
      }
    } finally {
      lock.writeLock().unlock();
    }
  }

  /**
   * Set table state to provided but only if table not in specified states.
   * Caller should lock table on write.
   * @param tableName table to change state for
   * @param newState new state
   * @param states states to check against
   * @return true if the state was changed, false if the table was in {@code states}
   * @throws IOException if reading or updating the state in meta fails
   * @throws TableNotFoundException if no state for the table is found in meta
   */
  public boolean setTableStateIfNotInStates(TableName tableName,
                                            TableState.State newState,
                                            TableState.State... states)
          throws IOException {
    // Hold the write lock so the read-check-update sequence below is atomic,
    // consistent with setTableStateIfInStates. Previously this method ran
    // unlocked, so a concurrent setTableState could interleave between the
    // read and the update.
    lock.writeLock().lock();
    try {
      TableState currentState = readMetaState(tableName);
      if (currentState == null) {
        throw new TableNotFoundException(tableName);
      }
      if (!currentState.inStates(states)) {
        udpateMetaState(tableName, newState);
        return true;
      } else {
        return false;
      }
    } finally {
      lock.writeLock().unlock();
    }
  }

  /**
   * Check that the table is in one of the given states.
   * @param tableName table to check
   * @param states states to check against
   * @return true if the table exists and is in one of {@code states}; false if the
   *         table is in another state or its state could not be read
   */
  public boolean isTableState(TableName tableName, TableState.State... states) {
    try {
      TableState.State tableState = getTableState(tableName);
      return TableState.isInStates(tableState, states);
    } catch (IOException e) {
      LOG.error("Unable to get table " + tableName + " state, probably table not exists");
      return false;
    }
  }

  /**
   * Remove the persisted state of a deleted table. No-op for the meta table,
   * whose state is never persisted (see {@link #readMetaState(TableName)}).
   * @param tableName table that was deleted
   * @throws IOException if deleting the state from meta fails
   */
  public void setDeletedTable(TableName tableName) throws IOException {
    if (tableName.equals(TableName.META_TABLE_NAME))
      return;
    MetaTableAccessor.deleteTableState(master.getConnection(), tableName);
  }

  /**
   * @param tableName table to check
   * @return true if a state for the table exists in meta
   * @throws IOException if reading from meta fails
   */
  public boolean isTablePresent(TableName tableName) throws IOException {
    return readMetaState(tableName) != null;
  }

  /**
   * Return all tables in given states.
   *
   * @param states filter by states
   * @return tables in given states
   * @throws IOException if scanning meta fails
   */
  public Set<TableName> getTablesInStates(final TableState.State... states) throws IOException {
    final Set<TableName> rv = Sets.newHashSet();
    MetaTableAccessor.fullScanTables(master.getConnection(), new MetaTableAccessor.Visitor() {
      @Override
      public boolean visit(Result r) throws IOException {
        TableState tableState = MetaTableAccessor.getTableState(r);
        if (tableState != null && tableState.inStates(states))
          rv.add(tableState.getTableName());
        return true;
      }
    });
    return rv;
  }

  /**
   * @param tableName table to look up
   * @return current state of the table, never null
   * @throws TableNotFoundException if no state for the table is found in meta
   * @throws IOException if reading from meta fails
   */
  @Nonnull
  public TableState.State getTableState(TableName tableName) throws IOException {
    TableState currentState = readMetaState(tableName);
    if (currentState == null) {
      throw new TableNotFoundException(tableName);
    }
    return currentState.getState();
  }

  // NOTE(review): method name keeps the historical "udpate" typo on purpose --
  // it is protected (overridable/callable by subclasses), so renaming it would
  // be an incompatible API change.
  protected void udpateMetaState(TableName tableName, TableState.State newState)
      throws IOException {
    MetaTableAccessor.updateTableState(master.getConnection(), tableName, newState);
  }

  /**
   * Read the table's state from meta. The meta table itself is always reported
   * as ENABLED without touching meta.
   * @param tableName table to look up
   * @return the persisted state, or null if none exists
   * @throws IOException if reading from meta fails
   */
  @Nullable
  protected TableState readMetaState(TableName tableName) throws IOException {
    if (tableName.equals(TableName.META_TABLE_NAME))
      return new TableState(tableName, TableState.State.ENABLED);
    return MetaTableAccessor.getTableState(master.getConnection(), tableName);
  }

  @SuppressWarnings("deprecation")
  public void start() throws IOException {
    TableDescriptors tableDescriptors = master.getTableDescriptors();
    Connection connection = master.getConnection();
    fixTableStates(tableDescriptors, connection);
  }

  /**
   * Ensure every table that has a descriptor also has a persisted state:
   * tables found with a descriptor but no state are marked ENABLED.
   * @param tableDescriptors source of all table descriptors
   * @param connection connection used to scan and update meta
   * @throws IOException if scanning or updating meta fails
   */
  public static void fixTableStates(TableDescriptors tableDescriptors, Connection connection)
      throws IOException {
    final Map<String, TableDescriptor> allDescriptors =
        tableDescriptors.getAllDescriptors();
    final Map<String, TableState> states = new HashMap<>();
    MetaTableAccessor.fullScanTables(connection, new MetaTableAccessor.Visitor() {
      @Override
      public boolean visit(Result r) throws IOException {
        TableState state = MetaTableAccessor.getTableState(r);
        if (state != null)
          states.put(state.getTableName().getNameAsString(), state);
        return true;
      }
    });
    for (Map.Entry<String, TableDescriptor> entry : allDescriptors.entrySet()) {
      String table = entry.getKey();
      // Meta's state is implicit (see readMetaState), never persisted.
      if (table.equals(TableName.META_TABLE_NAME.getNameAsString()))
        continue;
      if (!states.containsKey(table)) {
        LOG.warn("Found table with no state, assuming ENABLED");
        MetaTableAccessor.updateTableState(connection, TableName.valueOf(table),
            TableState.State.ENABLED);
      }
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.spi.deployment.uri.scanners;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.URI;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.internal.util.tostring.GridToStringExclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.spi.IgniteSpiThread;
/**
* URI deployment scanner manager.
*/
public class UriDeploymentScannerManager implements UriDeploymentScannerContext {
    /** Grid name. */
    private final String gridName;

    /** URI that scanner should looks after. */
    @GridToStringExclude
    private final URI uri;

    /** Temporary deployment directory. */
    private final File deployDir;

    /** Scan frequency. */
    private final long freq;

    /** Found files filter. */
    private final FilenameFilter filter;

    /** Scanner listener which should be notified about changes. */
    private final GridUriDeploymentScannerListener lsnr;

    /** Logger. */
    private final IgniteLogger log;

    /** Underlying scanner. */
    private final UriDeploymentScanner scanner;

    /** Scanner implementation. */
    private IgniteSpiThread scannerThread;

    /**
     * Whether first scan completed or not. Written and read on the scanner
     * thread; NOTE(review): if other threads ever query {@link #isFirstScan()},
     * this field should become volatile -- confirm against callers.
     */
    private boolean firstScan = true;

    /**
     * Creates new scanner.
     *
     * @param gridName Grid name.
     * @param uri URI which scanner should looks after.
     * @param deployDir Temporary deployment directory.
     * @param freq Scan frequency.
     * @param filter Found files filter.
     * @param lsnr Scanner listener which should be notifier about changes.
     * @param log Logger.
     * @param scanner Scanner.
     */
    public UriDeploymentScannerManager(
        String gridName,
        URI uri,
        File deployDir,
        long freq,
        FilenameFilter filter,
        GridUriDeploymentScannerListener lsnr,
        IgniteLogger log,
        UriDeploymentScanner scanner) {
        assert uri != null;
        assert freq > 0;
        assert deployDir != null;
        assert filter != null;
        assert log != null;
        assert lsnr != null;
        assert scanner != null;

        this.gridName = gridName;
        this.uri = uri;
        this.deployDir = deployDir;
        this.freq = freq;
        this.filter = filter;
        this.log = log.getLogger(getClass());
        this.lsnr = lsnr;
        this.scanner = scanner;
    }

    /**
     * Starts scanner. Spawns a daemon-style SPI thread that repeatedly invokes
     * the underlying scanner and sleeps {@code freq} ms between passes. The
     * listener is guaranteed to get exactly one {@code onFirstScanFinished()}
     * callback, even if the thread is cancelled before the first pass completes.
     */
    public void start() {
        scannerThread = new IgniteSpiThread(gridName, "grid-uri-scanner", log) {
            /** {@inheritDoc} */
            @SuppressWarnings({"BusyWait"})
            @Override protected void body() throws InterruptedException {
                try {
                    while (!isInterrupted()) {
                        try {
                            scanner.scan(UriDeploymentScannerManager.this);
                        }
                        finally {
                            // Do it in finally to avoid any hanging.
                            if (firstScan) {
                                firstScan = false;

                                lsnr.onFirstScanFinished();
                            }
                        }

                        Thread.sleep(freq);
                    }
                }
                finally {
                    // Double check. If we were cancelled before anything has been scanned.
                    if (firstScan) {
                        firstScan = false;

                        lsnr.onFirstScanFinished();
                    }
                }
            }
        };

        scannerThread.start();

        if (log.isDebugEnabled())
            log.debug("Grid URI deployment scanner started: " + this);
    }

    /**
     * Cancels scanner execution by interrupting the scanner thread.
     */
    public void cancel() {
        U.interrupt(scannerThread);
    }

    /**
     * Joins scanner thread, waiting for it to terminate.
     */
    public void join() {
        U.join(scannerThread, log);

        if (log.isDebugEnabled())
            log.debug("Grid URI deployment scanner stopped: " + this);
    }

    /** {@inheritDoc} */
    @Override public boolean isCancelled() {
        assert scannerThread != null;

        return scannerThread.isInterrupted();
    }

    /** {@inheritDoc} */
    @Override public File createTempFile(String fileName, File tmpDir) throws IOException {
        assert fileName != null;

        int idx = fileName.lastIndexOf('.');

        // No extension: treat the whole name as the prefix.
        if (idx == -1)
            idx = fileName.length();

        String prefix = fileName.substring(0, idx);

        if (idx < 3) { // Prefix must be at least 3 characters long. See File.createTempFile(...).
            prefix += "___";
        }

        String suffix = fileName.substring(idx);

        return File.createTempFile(prefix, suffix, tmpDir);
    }

    /** {@inheritDoc} */
    @Override public boolean isFirstScan() {
        return firstScan;
    }

    /** {@inheritDoc} */
    @Override public URI getUri() {
        return uri;
    }

    /** {@inheritDoc} */
    @Override public File getDeployDirectory() {
        return deployDir;
    }

    /** {@inheritDoc} */
    @Override public FilenameFilter getFilter() {
        return filter;
    }

    /** {@inheritDoc} */
    @Override public GridUriDeploymentScannerListener getListener() {
        return lsnr;
    }

    /** {@inheritDoc} */
    @Override public IgniteLogger getLogger() {
        return log;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(UriDeploymentScannerManager.class, this, "uri", U.hidePassword(uri.toString()));
    }
}
| |
/*
* Copyright 2014 Uwe Trottmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.battlelancer.seriesguide.ui;
import android.annotation.TargetApi;
import android.app.ActionBar;
import android.content.Intent;
import android.nfc.NdefMessage;
import android.nfc.NdefRecord;
import android.nfc.NfcAdapter;
import android.nfc.NfcAdapter.CreateNdefMessageCallback;
import android.nfc.NfcEvent;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.view.ViewPager;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import com.battlelancer.seriesguide.R;
import com.battlelancer.seriesguide.adapters.TabStripAdapter;
import com.battlelancer.seriesguide.items.Series;
import com.battlelancer.seriesguide.util.DBUtils;
import com.battlelancer.seriesguide.util.Utils;
import com.battlelancer.seriesguide.widgets.SlidingTabLayout;
import com.uwetrottmann.androidutils.AndroidUtils;
import java.lang.ref.WeakReference;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
/**
* Hosts an {@link OverviewFragment}.
*/
public class OverviewActivity extends BaseNavDrawerActivity {

    // Loader ids used by the fragments hosted in this activity.
    public static final int SHOW_LOADER_ID = 100;
    public static final int SHOW_CREDITS_LOADER_ID = 101;
    public static final int OVERVIEW_EPISODE_LOADER_ID = 102;
    public static final int OVERVIEW_SHOW_LOADER_ID = 103;
    public static final int OVERVIEW_ACTIONS_LOADER_ID = 104;
    public static final int SEASONS_LOADER_ID = 105;

    /** Analytics category for tracker events fired from this screen. */
    private static final String TAG = "Overview";

    /** TVDb id of the show to display, read from the launch intent. */
    private int mShowId;

    private NfcAdapter mNfcAdapter;

    @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH)
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_overview);
        setupNavDrawer();

        mShowId = getIntent().getIntExtra(OverviewFragment.InitBundle.SHOW_TVDBID, -1);
        if (mShowId == -1) {
            // Nothing to display without a valid show id.
            finish();
            return;
        }

        setupActionBar();
        setupViews(savedInstanceState);

        // Support beaming shows via Android Beam
        if (AndroidUtils.isICSOrHigher()) {
            mNfcAdapter = NfcAdapter.getDefaultAdapter(this);
            if (mNfcAdapter != null) {
                mNfcAdapter.setNdefPushMessageCallback(new CreateNdefMessageCallback() {
                    @Override
                    public NdefMessage createNdefMessage(NfcEvent event) {
                        final Series show = DBUtils.getShow(OverviewActivity.this, mShowId);
                        if (show == null) {
                            // Show was removed in the meantime: returning null
                            // tells the NFC stack not to push a message instead
                            // of crashing with an NPE below.
                            return null;
                        }
                        // send id, also title and overview (both can be empty)
                        return new NdefMessage(new NdefRecord[] {
                                createMimeRecord(
                                        "application/com.battlelancer.seriesguide.beam",
                                        String.valueOf(mShowId).getBytes()),
                                createMimeRecord("application/com.battlelancer.seriesguide.beam",
                                        show.getTitle().getBytes()),
                                createMimeRecord("application/com.battlelancer.seriesguide.beam",
                                        show.getOverview().getBytes())
                        });
                    }

                    /**
                     * Creates a custom MIME type encapsulated in an NDEF record
                     */
                    public NdefRecord createMimeRecord(String mimeType, byte[] payload) {
                        byte[] mimeBytes = mimeType.getBytes(Charset.forName("US-ASCII"));
                        return new NdefRecord(
                                NdefRecord.TNF_MIME_MEDIA, mimeBytes, new byte[0], payload);
                    }
                }, this);
            }
        }

        updateShowDelayed(mShowId);
    }

    /** Shows the title and the up affordance in the action bar. */
    private void setupActionBar() {
        final ActionBar actionBar = getActionBar();
        actionBar.setDisplayShowTitleEnabled(true);
        actionBar.setDisplayHomeAsUpEnabled(true);
    }

    /**
     * Attaches the content fragments, choosing between the single-pane
     * (view pager) and multi-pane layouts based on which views the inflated
     * layout provides.
     */
    private void setupViews(Bundle savedInstanceState) {
        // look if we are on a multi-pane or single-pane layout...
        View pagerView = findViewById(R.id.pagerOverview);
        if (pagerView != null && pagerView.getVisibility() == View.VISIBLE) {
            // ...single pane layout with view pager

            // clear up left-over fragments from multi-pane layout
            findAndRemoveFragment(R.id.fragment_overview);
            findAndRemoveFragment(R.id.fragment_seasons);

            setupViewPager(pagerView);
        } else {
            // ...multi-pane overview and seasons fragment

            // clear up left-over fragments from single-pane layout
            boolean isSwitchingLayouts = getActiveFragments().size() != 0;
            for (Fragment fragment : getActiveFragments()) {
                getSupportFragmentManager().beginTransaction().remove(fragment).commit();
            }

            // attach new fragments if there are none or if we just switched
            // layouts
            if (savedInstanceState == null || isSwitchingLayouts) {
                setupPanes();
            }
        }
    }

    /** Attaches show, overview and seasons fragments for the multi-pane layout. */
    private void setupPanes() {
        Fragment showsFragment = ShowFragment.newInstance(mShowId);
        FragmentTransaction ft1 = getSupportFragmentManager().beginTransaction();
        ft1.setCustomAnimations(R.anim.fade_in, R.anim.fade_out);
        ft1.replace(R.id.fragment_show, showsFragment);
        ft1.commit();

        Fragment overviewFragment = OverviewFragment.newInstance(mShowId);
        FragmentTransaction ft2 = getSupportFragmentManager().beginTransaction();
        ft2.setCustomAnimations(R.anim.fade_in, R.anim.fade_out);
        ft2.replace(R.id.fragment_overview, overviewFragment);
        ft2.commit();

        Fragment seasonsFragment = SeasonsFragment.newInstance(mShowId);
        FragmentTransaction ft3 = getSupportFragmentManager().beginTransaction();
        ft3.setCustomAnimations(R.anim.fade_in, R.anim.fade_out);
        ft3.replace(R.id.fragment_seasons, seasonsFragment);
        ft3.commit();
    }

    /** Configures the view pager with show, overview and seasons tabs. */
    private void setupViewPager(View pagerView) {
        ViewPager pager = (ViewPager) pagerView;

        // setup tab strip
        TabStripAdapter tabsAdapter = new TabStripAdapter(getSupportFragmentManager(), this, pager,
                (SlidingTabLayout) findViewById(R.id.tabsOverview));

        Bundle argsShow = new Bundle();
        argsShow.putInt(ShowFragment.InitBundle.SHOW_TVDBID, mShowId);
        tabsAdapter.addTab(R.string.show, ShowFragment.class, argsShow);

        tabsAdapter.addTab(R.string.description_overview, OverviewFragment.class, getIntent()
                .getExtras());

        Bundle argsSeason = new Bundle();
        argsSeason.putInt(SeasonsFragment.InitBundle.SHOW_TVDBID, mShowId);
        tabsAdapter.addTab(R.string.seasons, SeasonsFragment.class, argsSeason);

        tabsAdapter.notifyTabsChanged();

        // select overview to be shown initially
        pager.setCurrentItem(1);
    }

    /** Removes the fragment with the given container id, if any is attached. */
    private void findAndRemoveFragment(int fragmentId) {
        Fragment overviewFragment = getSupportFragmentManager().findFragmentById(fragmentId);
        if (overviewFragment != null) {
            getSupportFragmentManager().beginTransaction().remove(overviewFragment).commit();
        }
    }

    // Weak refs to pager-attached fragments so they can be removed when
    // switching to the multi-pane layout. NOTE(review): entries are never
    // pruned; harmless for this screen but grows with re-attachments.
    List<WeakReference<Fragment>> mFragments = new ArrayList<WeakReference<Fragment>>();

    @Override
    public void onAttachFragment(Fragment fragment) {
        /*
         * View pager fragments have tags set by the pager, we can use this to
         * only add refs to those then, making them available to get removed if
         * we switch to a non-pager layout.
         */
        if (fragment.getTag() != null) {
            mFragments.add(new WeakReference<Fragment>(fragment));
        }
    }

    /** Returns the tracked fragments that are still alive and currently added. */
    public ArrayList<Fragment> getActiveFragments() {
        ArrayList<Fragment> ret = new ArrayList<Fragment>();
        for (WeakReference<Fragment> ref : mFragments) {
            Fragment f = ref.get();
            if (f != null) {
                if (f.isAdded()) {
                    ret.add(f);
                }
            }
        }
        return ret;
    }

    @Override
    public boolean onPrepareOptionsMenu(Menu menu) {
        // If the nav drawer is open, hide action items related to the content
        // view
        menu.findItem(R.id.menu_overview_search).setVisible(!isDrawerOpen());
        return super.onPrepareOptionsMenu(menu);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.overview_activity_menu, menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int itemId = item.getItemId();
        if (itemId == android.R.id.home) {
            Intent upIntent = new Intent(this, ShowsActivity.class);
            upIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK);
            startActivity(upIntent);
            return true;
        } else if (itemId == R.id.menu_overview_search) {
            onSearchRequested();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public boolean onSearchRequested() {
        // refine search with the show's title
        final Series show = DBUtils.getShow(this, mShowId);
        Bundle args = new Bundle();
        if (show != null) {
            // Previously show.getTitle() was called without a null check and
            // could crash if the show had been removed; now search simply
            // starts without the title refinement.
            args.putString(EpisodeSearchFragment.InitBundle.SHOW_TITLE, show.getTitle());
        }
        startSearch(null, false, args, false);
        return true;
    }

    // NOTE(review): currently unused within this class; kept for parity with
    // sibling activities that report tracker events -- confirm before removing.
    private void fireTrackerEvent(String label) {
        Utils.trackAction(this, TAG, label);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.transaction.coordinator;
import lombok.Cleanup;
import org.apache.bookkeeper.mledger.ManagedCursor;
import org.apache.bookkeeper.mledger.ManagedLedgerConfig;
import org.apache.bookkeeper.mledger.ManagedLedgerFactory;
import org.apache.bookkeeper.mledger.ManagedLedgerFactoryConfig;
import org.apache.bookkeeper.mledger.Position;
import org.apache.bookkeeper.mledger.impl.ManagedLedgerFactoryImpl;
import org.apache.bookkeeper.mledger.impl.ManagedLedgerImpl;
import org.apache.pulsar.client.api.transaction.TxnID;
import org.apache.pulsar.transaction.coordinator.exceptions.CoordinatorException;
import org.apache.pulsar.transaction.coordinator.exceptions.CoordinatorException.TransactionNotFoundException;
import org.apache.pulsar.transaction.coordinator.impl.MLTransactionLogImpl;
import org.apache.pulsar.transaction.coordinator.impl.MLTransactionMetadataStore;
import org.apache.pulsar.transaction.coordinator.proto.TxnStatus;
import org.apache.pulsar.transaction.coordinator.test.MockedBookKeeperTestCase;
import org.awaitility.Awaitility;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import static org.testng.Assert.assertEquals;
public class MLTransactionMetadataStoreTest extends MockedBookKeeperTestCase {
    // The argument is forwarded to MockedBookKeeperTestCase; presumably the
    // number of mocked bookies to run -- confirm against the base class.
    public MLTransactionMetadataStoreTest() {
        super(3);
    }
@Test
public void testTransactionOperation() throws Exception {
ManagedLedgerFactoryConfig factoryConf = new ManagedLedgerFactoryConfig();
factoryConf.setMaxCacheSize(0);
@Cleanup("shutdown")
ManagedLedgerFactory factory = new ManagedLedgerFactoryImpl(metadataStore, bkc, factoryConf);
TransactionCoordinatorID transactionCoordinatorID = new TransactionCoordinatorID(1);
MLTransactionLogImpl mlTransactionLog = new MLTransactionLogImpl(transactionCoordinatorID, factory,
new ManagedLedgerConfig());
MLTransactionMetadataStore transactionMetadataStore =
new MLTransactionMetadataStore(transactionCoordinatorID, mlTransactionLog,
new TransactionTimeoutTrackerImpl(), new TransactionRecoverTrackerImpl());
int checkReplayRetryCount = 0;
while (true) {
checkReplayRetryCount++;
if (checkReplayRetryCount > 3) {
Assert.fail();
break;
}
if (transactionMetadataStore.checkIfReady()) {
TxnID txnID = transactionMetadataStore.newTransaction(5000).get();
assertEquals(transactionMetadataStore.getTxnStatus(txnID).get(), TxnStatus.OPEN);
List<String> partitions = new ArrayList<>();
partitions.add("pt-1");
partitions.add("pt-2");
transactionMetadataStore.addProducedPartitionToTxn(txnID, partitions).get();
assertEquals(transactionMetadataStore.getTxnMeta(txnID).get().producedPartitions(), partitions);
partitions.add("pt-3");
transactionMetadataStore.addProducedPartitionToTxn(txnID, partitions).get();
assertEquals(transactionMetadataStore.getTxnMeta(txnID).get().producedPartitions(),
partitions);
List<TransactionSubscription> subscriptions = new ArrayList<>();
subscriptions.add(new TransactionSubscription("topic1", "sub1"));
subscriptions.add(new TransactionSubscription("topic2", "sub2"));
transactionMetadataStore.addAckedPartitionToTxn(txnID, subscriptions).get();
Assert.assertTrue(transactionMetadataStore.getTxnMeta(txnID).get().ackedPartitions().containsAll(subscriptions));
transactionMetadataStore.addAckedPartitionToTxn(txnID, subscriptions).get();
assertEquals(transactionMetadataStore.getTxnMeta(txnID).get().producedPartitions(),
partitions);
transactionMetadataStore.updateTxnStatus(txnID, TxnStatus.COMMITTING, TxnStatus.OPEN, false).get();
Assert.assertEquals(transactionMetadataStore.getTxnStatus(txnID).get(), TxnStatus.COMMITTING);
transactionMetadataStore.updateTxnStatus(txnID, TxnStatus.COMMITTED, TxnStatus.COMMITTING, false).get();
try {
transactionMetadataStore.getTxnMeta(txnID).get();
Assert.fail();
} catch (ExecutionException e) {
Assert.assertTrue(e.getCause() instanceof TransactionNotFoundException);
}
break;
} else {
checkReplayRetryCount++;
Thread.sleep(100);
}
}
}
@DataProvider(name = "isUseManagedLedger")
public Object[][] versions() {
return new Object[][] { { true }, { false } };
}
    /**
     * Verifies that the transaction sequence id survives a store restart: after
     * the first store hands out least-sig-bits 0, a second store built over the
     * same transaction log must continue with 1. When {@code isUseManagedLedger}
     * is true, the ledger holding the log entries is rolled and deleted first,
     * so recovery cannot simply replay them.
     */
    @Test(dataProvider = "isUseManagedLedger")
    public void testRecoverSequenceId(boolean isUseManagedLedger) throws Exception {
        ManagedLedgerFactoryConfig factoryConf = new ManagedLedgerFactoryConfig();
        factoryConf.setMaxCacheSize(0);
        @Cleanup("shutdown")
        ManagedLedgerFactory factory = new ManagedLedgerFactoryImpl(metadataStore, bkc, factoryConf);
        TransactionCoordinatorID transactionCoordinatorID = new TransactionCoordinatorID(1);
        ManagedLedgerConfig managedLedgerConfig = new ManagedLedgerConfig();
        // Small ledgers so rollCurrentLedgerIfFull below actually rolls.
        managedLedgerConfig.setMaxEntriesPerLedger(3);
        MLTransactionLogImpl mlTransactionLog = new MLTransactionLogImpl(transactionCoordinatorID, factory,
                managedLedgerConfig);
        MLTransactionMetadataStore transactionMetadataStore =
                new MLTransactionMetadataStore(transactionCoordinatorID, mlTransactionLog,
                        new TransactionTimeoutTrackerImpl(), new TransactionRecoverTrackerImpl());
        Awaitility.await().until(transactionMetadataStore::checkIfReady);
        TxnID txnID = transactionMetadataStore.newTransaction(20000).get();
        transactionMetadataStore.updateTxnStatus(txnID, TxnStatus.COMMITTING, TxnStatus.OPEN, false).get();
        if (isUseManagedLedger) {
            // Fully commit so the log entry becomes eligible for trimming.
            transactionMetadataStore.updateTxnStatus(txnID, TxnStatus.COMMITTED, TxnStatus.COMMITTING, false).get();
        }
        // First transaction from a fresh store gets sequence id 0.
        assertEquals(txnID.getLeastSigBits(), 0);
        // Reach into the log's private managed ledger to force a ledger roll.
        Field field = MLTransactionLogImpl.class.getDeclaredField("managedLedger");
        field.setAccessible(true);
        ManagedLedgerImpl managedLedger = (ManagedLedgerImpl) field.get(mlTransactionLog);
        Position position = managedLedger.getLastConfirmedEntry();
        if (isUseManagedLedger) {
            // Poll until the ledger containing the last entry has been rolled
            // away and deleted, so recovery below cannot rely on replaying it.
            Awaitility.await().until(() -> {
                managedLedger.rollCurrentLedgerIfFull();
                return !managedLedger.ledgerExists(position.getLedgerId());
            });
        }
        // Build a second store over the same log to simulate a restart.
        transactionMetadataStore =
                new MLTransactionMetadataStore(transactionCoordinatorID, mlTransactionLog,
                        new TransactionTimeoutTrackerImpl(), new TransactionRecoverTrackerImpl());
        Awaitility.await().until(transactionMetadataStore::checkIfReady);
        txnID = transactionMetadataStore.newTransaction(100000).get();
        // Sequence must continue after the recovered id, not restart at 0.
        assertEquals(txnID.getLeastSigBits(), 1);
    }
@Test
public void testInitTransactionReader() throws Exception {
    // Disable the ledger cache so every read goes to the ledger, forcing the
    // metadata store to genuinely replay the transaction log on startup.
    ManagedLedgerFactoryConfig factoryConf = new ManagedLedgerFactoryConfig();
    factoryConf.setMaxCacheSize(0);
    @Cleanup("shutdown")
    ManagedLedgerFactory factory = new ManagedLedgerFactoryImpl(metadataStore, bkc, factoryConf);
    TransactionCoordinatorID transactionCoordinatorID = new TransactionCoordinatorID(1);
    ManagedLedgerConfig managedLedgerConfig = new ManagedLedgerConfig();
    // Tiny ledgers (2 entries each) so the log spans multiple ledgers and the
    // init reader has to walk across ledger boundaries during recovery.
    managedLedgerConfig.setMaxEntriesPerLedger(2);
    MLTransactionLogImpl mlTransactionLog = new MLTransactionLogImpl(transactionCoordinatorID, factory,
            managedLedgerConfig);
    MLTransactionMetadataStore transactionMetadataStore =
            new MLTransactionMetadataStore(transactionCoordinatorID, mlTransactionLog,
                    new TransactionTimeoutTrackerImpl(), new TransactionRecoverTrackerImpl());
    // Poll until the store finishes its initial replay; give up after 3 retries.
    int checkReplayRetryCount = 0;
    while (true) {
        if (checkReplayRetryCount > 3) {
            Assert.fail();
            break;
        }
        if (transactionMetadataStore.checkIfReady()) {
            // Create two OPEN transactions and attach the same produced
            // partitions and acked subscriptions to both.
            TxnID txnID1 = transactionMetadataStore.newTransaction(1000).get();
            TxnID txnID2 = transactionMetadataStore.newTransaction(1000).get();
            assertEquals(transactionMetadataStore.getTxnStatus(txnID1).get(), TxnStatus.OPEN);
            assertEquals(transactionMetadataStore.getTxnStatus(txnID2).get(), TxnStatus.OPEN);
            List<String> partitions = new ArrayList<>();
            partitions.add("pt-1");
            partitions.add("pt-2");
            transactionMetadataStore.addProducedPartitionToTxn(txnID1, partitions).get();
            transactionMetadataStore.addProducedPartitionToTxn(txnID2, partitions).get();
            List<TransactionSubscription> subscriptions = new ArrayList<>();
            subscriptions.add(new TransactionSubscription("topic1", "sub1"));
            subscriptions.add(new TransactionSubscription("topic2", "sub2"));
            transactionMetadataStore.addAckedPartitionToTxn(txnID1, subscriptions).get();
            transactionMetadataStore.addAckedPartitionToTxn(txnID2, subscriptions).get();
            // Second batch deliberately repeats ("topic1","sub1") and adds
            // ("topic3","sub3") twice: the size assertions below (3, not 5)
            // show the store is expected to de-duplicate acked subscriptions.
            List<TransactionSubscription> subscriptions1 = new ArrayList<>();
            subscriptions1.add(new TransactionSubscription("topic1", "sub1"));
            subscriptions1.add(new TransactionSubscription("topic3", "sub3"));
            subscriptions1.add(new TransactionSubscription("topic3", "sub3"));
            transactionMetadataStore.addAckedPartitionToTxn(txnID1, subscriptions1).get();
            transactionMetadataStore.addAckedPartitionToTxn(txnID2, subscriptions1).get();
            transactionMetadataStore.updateTxnStatus(txnID1, TxnStatus.COMMITTING, TxnStatus.OPEN, false).get();
            transactionMetadataStore.updateTxnStatus(txnID2, TxnStatus.COMMITTING, TxnStatus.OPEN, false).get();
            // Close the first store and open a second one over the same log so
            // its init reader must reconstruct all of the state written above.
            transactionMetadataStore.closeAsync();
            MLTransactionMetadataStore transactionMetadataStoreTest =
                    new MLTransactionMetadataStore(transactionCoordinatorID,
                            new MLTransactionLogImpl(transactionCoordinatorID, factory,
                                    new ManagedLedgerConfig()), new TransactionTimeoutTrackerImpl(), new TransactionRecoverTrackerImpl());
            // Same polling pattern for the recovered store. The retry counter
            // is shared with the outer loop, hence the higher threshold of 6.
            while (true) {
                if (checkReplayRetryCount > 6) {
                    Assert.fail();
                    break;
                }
                if (transactionMetadataStoreTest.checkIfReady()) {
                    // Expected acked set after de-duplication: sub1, sub2, sub3.
                    subscriptions.add(new TransactionSubscription("topic3", "sub3"));
                    TxnMeta txnMeta1 = transactionMetadataStoreTest.getTxnMeta(txnID1).get();
                    TxnMeta txnMeta2 = transactionMetadataStoreTest.getTxnMeta(txnID2).get();
                    assertEquals(txnMeta1.producedPartitions(), partitions);
                    assertEquals(txnMeta2.producedPartitions(), partitions);
                    assertEquals(txnMeta1.ackedPartitions().size(), subscriptions.size());
                    assertEquals(txnMeta2.ackedPartitions().size(), subscriptions.size());
                    Assert.assertTrue(subscriptions.containsAll(txnMeta1.ackedPartitions()));
                    Assert.assertTrue(subscriptions.containsAll(txnMeta2.ackedPartitions()));
                    // The COMMITTING status must survive recovery...
                    assertEquals(txnMeta1.status(), TxnStatus.COMMITTING);
                    assertEquals(txnMeta2.status(), TxnStatus.COMMITTING);
                    transactionMetadataStoreTest
                            .updateTxnStatus(txnID1, TxnStatus.COMMITTED, TxnStatus.COMMITTING, false).get();
                    transactionMetadataStoreTest
                            .updateTxnStatus(txnID2, TxnStatus.COMMITTED, TxnStatus.COMMITTING, false).get();
                    // ... and once COMMITTED, the transactions are removed, so
                    // further look-ups fail with TransactionNotFoundException.
                    try {
                        transactionMetadataStoreTest.getTxnMeta(txnID1).get();
                        Assert.fail();
                    } catch (ExecutionException e) {
                        Assert.assertTrue(e.getCause() instanceof TransactionNotFoundException);
                    }
                    try {
                        transactionMetadataStoreTest.getTxnMeta(txnID2).get();
                        Assert.fail();
                    } catch (ExecutionException e) {
                        Assert.assertTrue(e.getCause() instanceof TransactionNotFoundException);
                    }
                    // Sequence ids 0 and 1 were consumed before recovery; the
                    // next transaction must continue from 2, not restart at 0.
                    TxnID txnID = transactionMetadataStoreTest.newTransaction(1000).get();
                    assertEquals(txnID.getLeastSigBits(), 2L);
                    break;
                } else {
                    checkReplayRetryCount++;
                    Thread.sleep(100);
                }
            }
            break;
        } else {
            checkReplayRetryCount++;
            Thread.sleep(100);
        }
    }
}
@Test
public void testDeleteLog() throws Exception {
    // Cache disabled so reads hit the ledger, as in the other recovery tests.
    ManagedLedgerFactoryConfig factoryConf = new ManagedLedgerFactoryConfig();
    factoryConf.setMaxCacheSize(0);
    @Cleanup("shutdown")
    ManagedLedgerFactory factory = new ManagedLedgerFactoryImpl(metadataStore, bkc, factoryConf);
    TransactionCoordinatorID transactionCoordinatorID = new TransactionCoordinatorID(1);
    MLTransactionLogImpl mlTransactionLog = new MLTransactionLogImpl(transactionCoordinatorID, factory,
            new ManagedLedgerConfig());
    MLTransactionMetadataStore transactionMetadataStore =
            new MLTransactionMetadataStore(transactionCoordinatorID, mlTransactionLog,
                    new TransactionTimeoutTrackerImpl(), new TransactionRecoverTrackerImpl());
    // Poll until the store finishes its initial replay; give up after 3 retries.
    int checkReplayRetryCount = 0;
    while (true) {
        if (checkReplayRetryCount > 3) {
            Assert.fail();
            break;
        }
        if (transactionMetadataStore.checkIfReady()) {
            // Drive two transactions through a complete lifecycle — txnID1 to
            // COMMITTED, txnID2 to ABORTED — each with produced partitions and
            // acked subscriptions (including duplicates in the second batch).
            TxnID txnID1 = transactionMetadataStore.newTransaction(1000).get();
            TxnID txnID2 = transactionMetadataStore.newTransaction(1000).get();
            assertEquals(transactionMetadataStore.getTxnStatus(txnID1).get(), TxnStatus.OPEN);
            assertEquals(transactionMetadataStore.getTxnStatus(txnID2).get(), TxnStatus.OPEN);
            List<String> partitions = new ArrayList<>();
            partitions.add("pt-1");
            partitions.add("pt-2");
            transactionMetadataStore.addProducedPartitionToTxn(txnID1, partitions).get();
            transactionMetadataStore.addProducedPartitionToTxn(txnID2, partitions).get();
            List<TransactionSubscription> subscriptions = new ArrayList<>();
            subscriptions.add(new TransactionSubscription("topic1", "sub1"));
            subscriptions.add(new TransactionSubscription("topic2", "sub2"));
            transactionMetadataStore.addAckedPartitionToTxn(txnID1, subscriptions).get();
            transactionMetadataStore.addAckedPartitionToTxn(txnID2, subscriptions).get();
            List<TransactionSubscription> subscriptions1 = new ArrayList<>();
            subscriptions1.add(new TransactionSubscription("topic1", "sub1"));
            subscriptions1.add(new TransactionSubscription("topic3", "sub3"));
            subscriptions1.add(new TransactionSubscription("topic3", "sub3"));
            transactionMetadataStore.addAckedPartitionToTxn(txnID1, subscriptions1).get();
            transactionMetadataStore.addAckedPartitionToTxn(txnID2, subscriptions1).get();
            transactionMetadataStore.updateTxnStatus(txnID1, TxnStatus.COMMITTING, TxnStatus.OPEN, false).get();
            transactionMetadataStore.updateTxnStatus(txnID2, TxnStatus.ABORTING, TxnStatus.OPEN, false).get();
            transactionMetadataStore.updateTxnStatus(txnID1, TxnStatus.COMMITTED, TxnStatus.COMMITTING, false).get();
            transactionMetadataStore.updateTxnStatus(txnID2, TxnStatus.ABORTED, TxnStatus.ABORTING, false).get();
            // Both transactions ended in a terminal state, so every log entry
            // should have been acknowledged: reach into the log's private
            // cursor via reflection and check that its mark-delete position
            // caught up with the ledger's last confirmed entry.
            Field field = mlTransactionLog.getClass().getDeclaredField("cursor");
            field.setAccessible(true);
            ManagedCursor cursor = (ManagedCursor) field.get(mlTransactionLog);
            assertEquals(cursor.getMarkDeletedPosition(), cursor.getManagedLedger().getLastConfirmedEntry());
            break;
        } else {
            checkReplayRetryCount++;
            Thread.sleep(100);
        }
    }
}
@Test
public void testRecoverWhenDeleteFromCursor() throws Exception {
    // Cache disabled so recovery really reads the log from the ledger.
    ManagedLedgerFactoryConfig factoryConf = new ManagedLedgerFactoryConfig();
    factoryConf.setMaxCacheSize(0);
    @Cleanup("shutdown")
    ManagedLedgerFactory factory = new ManagedLedgerFactoryImpl(metadataStore, bkc, factoryConf);
    TransactionCoordinatorID transactionCoordinatorID = new TransactionCoordinatorID(1);
    MLTransactionLogImpl mlTransactionLog = new MLTransactionLogImpl(transactionCoordinatorID, factory,
            new ManagedLedgerConfig());
    MLTransactionMetadataStore transactionMetadataStore =
            new MLTransactionMetadataStore(transactionCoordinatorID, mlTransactionLog,
                    new TransactionTimeoutTrackerImpl(), new TransactionRecoverTrackerImpl());
    Awaitility.await().until(transactionMetadataStore::checkIfReady);
    // txnID1 stays OPEN: its entries have not been deleted from the cursor,
    // so it can be recovered from the transaction log.
    TxnID txnID1 = transactionMetadataStore.newTransaction(1000).get();
    // txnID2 is driven to the terminal ABORTED state, after which its entries
    // have been deleted from the cursor.
    TxnID txnID2 = transactionMetadataStore.newTransaction(1000).get();
    transactionMetadataStore.updateTxnStatus(txnID2, TxnStatus.ABORTING, TxnStatus.OPEN, false).get();
    transactionMetadataStore.updateTxnStatus(txnID2, TxnStatus.ABORTED, TxnStatus.ABORTING, false).get();
    // Rebuild the store over the same log: recovery must still become ready
    // even though part of the log (txnID2's entries) is already acknowledged.
    mlTransactionLog = new MLTransactionLogImpl(transactionCoordinatorID, factory,
            new ManagedLedgerConfig());
    transactionMetadataStore =
            new MLTransactionMetadataStore(transactionCoordinatorID, mlTransactionLog,
                    new TransactionTimeoutTrackerImpl(), new TransactionRecoverTrackerImpl());
    Awaitility.await().until(transactionMetadataStore::checkIfReady);
}
/**
 * No-op {@code TransactionTimeoutTracker} stub used by the tests in this
 * class: transaction-timeout handling is not under test here, so every
 * callback intentionally does nothing.
 */
public class TransactionTimeoutTrackerImpl implements TransactionTimeoutTracker {

    // NOTE(review): returns null rather than a completed future — the tests in
    // this file appear never to consume the result; confirm before reusing
    // this stub elsewhere.
    @Override
    public CompletableFuture<Boolean> addTransaction(long sequenceId, long timeout) {
        return null;
    }

    @Override
    public void replayAddTransaction(long sequenceId, long timeout) {
    }

    @Override
    public void start() {
    }

    @Override
    public void close() {
    }
}
/**
 * No-op {@code TransactionRecoverTracker} stub used by the tests in this
 * class: recovery bookkeeping is not under test here, so every callback
 * intentionally does nothing.
 */
public static class TransactionRecoverTrackerImpl implements TransactionRecoverTracker {

    @Override
    public void updateTransactionStatus(long sequenceId, TxnStatus txnStatus) throws CoordinatorException.InvalidTxnStatusException {
    }

    @Override
    public void handleOpenStatusTransaction(long sequenceId, long timeout) {
    }

    @Override
    public void appendOpenTransactionToTimeoutTracker() {
    }

    @Override
    public void handleCommittingAndAbortingTransaction() {
    }
}
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.repository.kdr.delegates;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettlePluginLoaderException;
import org.pentaho.di.core.gui.Point;
import org.pentaho.di.core.plugins.PluginInterface;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.partition.PartitionSchema;
import org.pentaho.di.repository.LongObjectId;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.kdr.KettleDatabaseRepository;
import org.pentaho.di.trans.step.RowDistributionInterface;
import org.pentaho.di.trans.step.RowDistributionPluginType;
import org.pentaho.di.trans.step.StepErrorMeta;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.step.StepPartitioningMeta;
/**
 * Repository delegate that loads and saves {@link StepMeta} information (step
 * rows, step types, attributes, partitioning and error-handling metadata) in a
 * Kettle database repository.
 */
public class KettleDatabaseRepositoryStepDelegate extends KettleDatabaseRepositoryBaseDelegate {

  private static Class<?> PKG = StepMeta.class; // for i18n purposes, needed by Translator2!!

  /**
   * Prefix marking a step attribute row as part of an attribute group. The full
   * attribute code is {@code STEP_ATTRIBUTE_PREFIX + groupName + '\t' + key};
   * see {@link #loadStepAttributesMap(ObjectId)} for the matching parser.
   */
  public static final String STEP_ATTRIBUTE_PREFIX = "_ATTR_" + '\t';

  public KettleDatabaseRepositoryStepDelegate( KettleDatabaseRepository repository ) {
    super( repository );
  }

  /**
   * Looks up the ID of a step type in R_STEP_TYPE by its code.
   */
  public synchronized ObjectId getStepTypeID( String code ) throws KettleException {
    return repository.connectionDelegate.getIDWithValue(
      quoteTable( KettleDatabaseRepository.TABLE_R_STEP_TYPE ),
      quote( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE ),
      quote( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE ), code );
  }

  /**
   * Looks up the IDs of the first {@code amount} step type codes.
   *
   * @param codes step type codes; only the first {@code amount} entries are used
   * @param amount number of codes to resolve
   */
  public ObjectId[] getStepTypeIDs( String[] codes, int amount ) throws KettleException {
    // Trim the code array when the caller asks for fewer entries than supplied.
    if ( amount != codes.length ) {
      String[] tmp = new String[ amount ];
      System.arraycopy( codes, 0, tmp, 0, amount );
      codes = tmp;
    }
    return repository.connectionDelegate.getIDsWithValues(
      quoteTable( KettleDatabaseRepository.TABLE_R_STEP_TYPE ),
      quote( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE ),
      quote( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE ), codes );
  }

  /**
   * Looks up the ID of a step by name within one transformation.
   */
  public synchronized ObjectId getStepID( String name, ObjectId id_transformation ) throws KettleException {
    return repository.connectionDelegate.getIDWithValue(
      quoteTable( KettleDatabaseRepository.TABLE_R_STEP ), quote( KettleDatabaseRepository.FIELD_STEP_ID_STEP ),
      quote( KettleDatabaseRepository.FIELD_STEP_NAME ), name,
      quote( KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION ), id_transformation );
  }

  /**
   * Returns the step type code for the given step type ID.
   */
  public synchronized String getStepTypeCode( ObjectId id_database_type ) throws KettleException {
    return repository.connectionDelegate.getStringWithID(
      quoteTable( KettleDatabaseRepository.TABLE_R_STEP_TYPE ),
      quote( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE ), id_database_type,
      quote( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE ) );
  }

  /**
   * Reads the R_STEP row for the given step ID, or null if it does not exist.
   */
  public RowMetaAndData getStep( ObjectId id_step ) throws KettleException {
    return repository.connectionDelegate.getOneRow(
      quoteTable( KettleDatabaseRepository.TABLE_R_STEP ), quote( KettleDatabaseRepository.FIELD_STEP_ID_STEP ),
      id_step );
  }

  /**
   * Reads the R_STEP_TYPE row for the given step type ID.
   */
  public RowMetaAndData getStepType( ObjectId id_step_type ) throws KettleException {
    return repository.connectionDelegate.getOneRow(
      quoteTable( KettleDatabaseRepository.TABLE_R_STEP_TYPE ),
      quote( KettleDatabaseRepository.FIELD_STEP_TYPE_ID_STEP_TYPE ), id_step_type );
  }

  /**
   * Reads the R_STEP_ATTRIBUTE row for the given step attribute ID.
   */
  public RowMetaAndData getStepAttribute( ObjectId id_step_attribute ) throws KettleException {
    return repository.connectionDelegate.getOneRow(
      quoteTable( KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE ),
      quote( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP_ATTRIBUTE ), id_step_attribute );
  }

  /**
   * Create a new step by loading the metadata from the specified repository.
   *
   * @param stepId the ID of the step to load
   * @param databases the databases available for the step metadata to reference
   * @param partitionSchemas the partition schemas to resolve the step's partitioning against
   * @return the fully loaded step metadata
   * @throws KettleException when the step cannot be found or loaded
   */
  public StepMeta loadStepMeta( ObjectId stepId, List<DatabaseMeta> databases,
    List<PartitionSchema> partitionSchemas ) throws KettleException {
    StepMeta stepMeta = new StepMeta();
    PluginRegistry registry = PluginRegistry.getInstance();

    try {
      RowMetaAndData r = getStep( stepId );
      if ( r != null ) {
        stepMeta.setObjectId( stepId );

        stepMeta.setName( r.getString( KettleDatabaseRepository.FIELD_STEP_NAME, null ) );
        stepMeta.setDescription( r.getString( KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, null ) );

        long id_step_type = r.getInteger( KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, -1L );
        RowMetaAndData steptyperow = getStepType( new LongObjectId( id_step_type ) );

        stepMeta.setStepID( steptyperow.getString( KettleDatabaseRepository.FIELD_STEP_TYPE_CODE, null ) );
        stepMeta.setDistributes( r.getBoolean( KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, true ) );

        // A non-empty copies *string* (possibly a variable expression) takes
        // precedence over the numeric copies column.
        int copies = (int) r.getInteger( KettleDatabaseRepository.FIELD_STEP_COPIES, 1 );
        String copiesString = r.getString( KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, null );
        if ( !Const.isEmpty( copiesString ) ) {
          stepMeta.setCopiesString( copiesString );
        } else {
          stepMeta.setCopies( copies );
        }

        int x = (int) r.getInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, 0 );
        int y = (int) r.getInteger( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, 0 );
        stepMeta.setLocation( new Point( x, y ) );
        stepMeta.setDraw( r.getBoolean( KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, false ) );

        // Generate the appropriate class...
        PluginInterface sp = registry.findPluginWithId( StepPluginType.class, stepMeta.getStepID() );
        if ( sp != null ) {
          stepMeta.setStepMetaInterface( (StepMetaInterface) registry.loadClass( sp ) );
        } else {
          throw new KettlePluginLoaderException( stepMeta.getStepID(), BaseMessages.getString(
            PKG, "StepMeta.Exception.UnableToLoadClass", stepMeta.getStepID() + Const.CR ) );
        }

        if ( stepMeta.getStepMetaInterface() != null ) {
          // Read the step info from the repository, first through the
          // deprecated v4-compatible call, then the current readRep().
          readRepCompatibleStepMeta(
            stepMeta.getStepMetaInterface(), repository, stepMeta.getObjectId(), databases );
          stepMeta.getStepMetaInterface().readRep(
            repository, repository.metaStore, stepMeta.getObjectId(), databases );
        }

        // Get the partitioning as well...
        //
        stepMeta.setStepPartitioningMeta( loadStepPartitioningMeta( stepMeta.getObjectId() ) );
        stepMeta.getStepPartitioningMeta().setPartitionSchemaAfterLoading( partitionSchemas );

        // Get the cluster schema name
        //
        stepMeta.setClusterSchemaName( repository.getStepAttributeString( stepId, "cluster_schema" ) );

        // Are we using a custom row distribution plugin?
        //
        String rowDistributionCode = repository.getStepAttributeString( stepId, 0, "row_distribution_code" );
        RowDistributionInterface rowDistribution =
          PluginRegistry.getInstance().loadClass(
            RowDistributionPluginType.class, rowDistributionCode, RowDistributionInterface.class );
        stepMeta.setRowDistribution( rowDistribution );

        // Load the attribute groups map
        //
        stepMeta.setAttributesMap( loadStepAttributesMap( stepId ) );

        // Done!
        //
        return stepMeta;
      } else {
        throw new KettleException( BaseMessages.getString(
          PKG, "StepMeta.Exception.StepInfoCouldNotBeFound", String.valueOf( stepId ) ) );
      }
    } catch ( KettleDatabaseException dbe ) {
      throw new KettleException( BaseMessages.getString( PKG, "StepMeta.Exception.StepCouldNotBeLoaded", String
        .valueOf( stepMeta.getObjectId() ) ), dbe );
    }
  }

  /**
   * Compatible loading of metadata for v4 style plugins using deprecated methods.
   *
   * @param stepMetaInterface the plugin metadata to load into
   * @param repository the repository to read from
   * @param objectId the step ID
   * @param databases the available databases
   * @throws KettleException when the plugin fails to read its metadata
   */
  @SuppressWarnings( "deprecation" )
  private void readRepCompatibleStepMeta( StepMetaInterface stepMetaInterface,
    KettleDatabaseRepository repository, ObjectId objectId, List<DatabaseMeta> databases ) throws KettleException {
    stepMetaInterface.readRep( repository, objectId, databases, null );
  }

  /**
   * Saves a step and all its associated metadata (partitioning, plugin
   * attributes, cluster schema, row distribution, attribute groups) under the
   * given transformation.
   *
   * @param stepMeta the step to save; its object ID is assigned here
   * @param transformationId the owning transformation's ID
   * @throws KettleException when any part of the step cannot be saved
   */
  public void saveStepMeta( StepMeta stepMeta, ObjectId transformationId ) throws KettleException {
    try {
      log.logDebug( BaseMessages.getString( PKG, "StepMeta.Log.SaveNewStep" ) );
      // Insert new Step in repository
      stepMeta.setObjectId( insertStep(
        transformationId, stepMeta.getName(), stepMeta.getDescription(), stepMeta.getStepID(), stepMeta
          .isDistributes(), stepMeta.getCopies(), stepMeta.getLocation() == null
          ? -1 : stepMeta.getLocation().x, stepMeta.getLocation() == null ? -1 : stepMeta.getLocation().y,
        stepMeta.isDrawn(), stepMeta.getCopiesString() ) );

      // Save partitioning selection for the step
      //
      repository.stepDelegate.saveStepPartitioningMeta(
        stepMeta.getStepPartitioningMeta(), transformationId, stepMeta.getObjectId() );

      // The id_step is known, as well as the id_transformation
      // This means we can now save the attributes of the step...
      //
      log.logDebug( BaseMessages.getString( PKG, "StepMeta.Log.SaveStepDetails" ) );
      compatibleSaveRep( stepMeta.getStepMetaInterface(), repository, transformationId, stepMeta.getObjectId() );
      stepMeta.getStepMetaInterface().saveRep(
        repository, repository.metaStore, transformationId, stepMeta.getObjectId() );

      // Save the name of the clustering schema that was chosen.
      //
      repository.saveStepAttribute( transformationId, stepMeta.getObjectId(), "cluster_schema", stepMeta
        .getClusterSchema() == null ? "" : stepMeta.getClusterSchema().getName() );

      // Save the row distribution code (plugin ID)
      //
      repository.saveStepAttribute( transformationId, stepMeta.getObjectId(), "row_distribution_code", stepMeta
        .getRowDistribution() == null ? null : stepMeta.getRowDistribution().getCode() );

      // Save the attribute groups map
      //
      saveAttributesMap( transformationId, stepMeta.getObjectId(), stepMeta.getAttributesMap() );
    } catch ( KettleException e ) {
      throw new KettleException( BaseMessages.getString( PKG, "StepMeta.Exception.UnableToSaveStepInfo", String
        .valueOf( transformationId ) ), e );
    }
  }

  /**
   * Compatible saving of metadata for v4 style plugins using deprecated methods.
   */
  @SuppressWarnings( "deprecation" )
  private void compatibleSaveRep( StepMetaInterface stepMetaInterface, KettleDatabaseRepository repository,
    ObjectId id_transformation, ObjectId objectId ) throws KettleException {
    stepMetaInterface.saveRep( repository, id_transformation, objectId );
  }

  /**
   * Saves the error-handling settings of a step as step attributes.
   */
  public void saveStepErrorMeta( StepErrorMeta meta, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
    repository.saveStepAttribute( id_transformation, id_step, "step_error_handling_source_step", meta
      .getSourceStep() != null ? meta.getSourceStep().getName() : "" );
    repository.saveStepAttribute( id_transformation, id_step, "step_error_handling_target_step", meta
      .getTargetStep() != null ? meta.getTargetStep().getName() : "" );
    repository.saveStepAttribute( id_transformation, id_step, "step_error_handling_is_enabled", meta.isEnabled() );
    repository.saveStepAttribute( id_transformation, id_step, "step_error_handling_nr_valuename", meta
      .getNrErrorsValuename() );
    repository.saveStepAttribute( id_transformation, id_step, "step_error_handling_descriptions_valuename", meta
      .getErrorDescriptionsValuename() );
    repository.saveStepAttribute( id_transformation, id_step, "step_error_handling_fields_valuename", meta
      .getErrorFieldsValuename() );
    repository.saveStepAttribute( id_transformation, id_step, "step_error_handling_codes_valuename", meta
      .getErrorCodesValuename() );
    repository.saveStepAttribute( id_transformation, id_step, "step_error_handling_max_errors", meta
      .getMaxErrors() );
    repository.saveStepAttribute( id_transformation, id_step, "step_error_handling_max_pct_errors", meta
      .getMaxPercentErrors() );
    repository.saveStepAttribute( id_transformation, id_step, "step_error_handling_min_pct_rows", meta
      .getMinPercentRows() );
  }

  /**
   * Loads the error-handling settings of a step from its step attributes.
   * The source step is the given {@code stepMeta} (set by the constructor);
   * only the target step is resolved by name from the attributes.
   */
  public StepErrorMeta loadStepErrorMeta( VariableSpace variables, StepMeta stepMeta, List<StepMeta> steps ) throws KettleException {
    StepErrorMeta meta = new StepErrorMeta( variables, stepMeta );
    meta.setTargetStep( StepMeta.findStep( steps, repository.getStepAttributeString(
      stepMeta.getObjectId(), "step_error_handling_target_step" ) ) );
    meta
      .setEnabled( repository.getStepAttributeBoolean( stepMeta.getObjectId(), "step_error_handling_is_enabled" ) );
    meta.setNrErrorsValuename( repository.getStepAttributeString(
      stepMeta.getObjectId(), "step_error_handling_nr_valuename" ) );
    meta.setErrorDescriptionsValuename( repository.getStepAttributeString(
      stepMeta.getObjectId(), "step_error_handling_descriptions_valuename" ) );
    meta.setErrorFieldsValuename( repository.getStepAttributeString(
      stepMeta.getObjectId(), "step_error_handling_fields_valuename" ) );
    meta.setErrorCodesValuename( repository.getStepAttributeString(
      stepMeta.getObjectId(), "step_error_handling_codes_valuename" ) );
    meta.setMaxErrors( repository
      .getStepAttributeString( stepMeta.getObjectId(), "step_error_handling_max_errors" ) );
    meta.setMaxPercentErrors( repository.getStepAttributeString(
      stepMeta.getObjectId(), "step_error_handling_max_pct_errors" ) );
    meta.setMinPercentRows( repository.getStepAttributeString(
      stepMeta.getObjectId(), "step_error_handling_min_pct_rows" ) );
    return meta;
  }

  /**
   * Loads the partitioning settings for the given step, including the
   * partitioner plugin's own settings when a method is configured.
   */
  public StepPartitioningMeta loadStepPartitioningMeta( ObjectId id_step ) throws KettleException {
    StepPartitioningMeta stepPartitioningMeta = new StepPartitioningMeta();

    stepPartitioningMeta.setPartitionSchemaName( repository
      .getStepAttributeString( id_step, "PARTITIONING_SCHEMA" ) );
    String methodCode = repository.getStepAttributeString( id_step, "PARTITIONING_METHOD" );
    stepPartitioningMeta.setMethod( StepPartitioningMeta.getMethod( methodCode ) );
    if ( stepPartitioningMeta.getPartitioner() != null ) {
      stepPartitioningMeta.getPartitioner().loadRep( repository, id_step );
    }
    stepPartitioningMeta.hasChanged( true );

    return stepPartitioningMeta;
  }

  /**
   * Saves partitioning properties in the repository for the given step.
   *
   * @param meta
   *          the partitioning metadata to store.
   * @param id_transformation
   *          the ID of the transformation
   * @param id_step
   *          the ID of the step
   * @throws KettleDatabaseException
   *           In case anything goes wrong
   *
   */
  public void saveStepPartitioningMeta( StepPartitioningMeta meta, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
    repository.saveStepAttribute(
      id_transformation, id_step, "PARTITIONING_SCHEMA", meta.getPartitionSchema() != null ? meta
        .getPartitionSchema().getName() : "" ); // selected schema
    repository.saveStepAttribute( id_transformation, id_step, "PARTITIONING_METHOD", meta.getMethodCode() );
    if ( meta.getPartitioner() != null ) {
      meta.getPartitioner().saveRep( repository, id_transformation, id_step );
    }
  }

  /**
   * Inserts a new R_STEP row and returns the newly allocated step ID.
   */
  //CHECKSTYLE:LineLength:OFF
  public synchronized ObjectId insertStep( ObjectId id_transformation, String name, String description,
    String steptype, boolean distribute, long copies, long gui_location_x, long gui_location_y,
    boolean gui_draw, String copiesString ) throws KettleException {
    ObjectId id = repository.connectionDelegate.getNextStepID();

    ObjectId id_step_type = getStepTypeID( steptype );

    RowMetaAndData table = new RowMetaAndData();

    table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ID_STEP, ValueMetaInterface.TYPE_INTEGER ), id );
    table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION, ValueMetaInterface.TYPE_INTEGER ), id_transformation );
    table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_NAME, ValueMetaInterface.TYPE_STRING ), name );
    table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_DESCRIPTION, ValueMetaInterface.TYPE_STRING ), description );
    table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_ID_STEP_TYPE, ValueMetaInterface.TYPE_INTEGER ), id_step_type );
    table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_DISTRIBUTE, ValueMetaInterface.TYPE_BOOLEAN ), Boolean.valueOf( distribute ) );
    // Long.valueOf instead of the deprecated Long(long) constructor.
    table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_COPIES, ValueMetaInterface.TYPE_INTEGER ), Long.valueOf( copies ) );
    table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_X, ValueMetaInterface.TYPE_INTEGER ), Long.valueOf( gui_location_x ) );
    table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_GUI_LOCATION_Y, ValueMetaInterface.TYPE_INTEGER ), Long.valueOf( gui_location_y ) );
    table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_GUI_DRAW, ValueMetaInterface.TYPE_BOOLEAN ), Boolean.valueOf( gui_draw ) );
    table.addValue( new ValueMeta( KettleDatabaseRepository.FIELD_STEP_COPIES_STRING, ValueMetaInterface.TYPE_STRING ), copiesString );

    repository.connectionDelegate.getDatabase().prepareInsert( table.getRowMeta(), KettleDatabaseRepository.TABLE_R_STEP );
    repository.connectionDelegate.getDatabase().setValuesInsert( table );
    repository.connectionDelegate.getDatabase().insertRow();
    repository.connectionDelegate.getDatabase().closeInsert();

    return id;
  }

  /**
   * Counts the steps of a transformation via a parameterized COUNT(*) query.
   */
  public synchronized int getNrSteps( ObjectId id_transformation ) throws KettleException {
    int retval = 0;

    RowMetaAndData par = repository.connectionDelegate.getParameterMetaData( id_transformation );
    String sql =
      "SELECT COUNT(*) FROM "
        + quoteTable( KettleDatabaseRepository.TABLE_R_STEP ) + " WHERE "
        + quote( KettleDatabaseRepository.FIELD_STEP_ID_TRANSFORMATION ) + " = ? ";
    RowMetaAndData r = repository.connectionDelegate.getOneRow( sql, par.getRowMeta(), par.getData() );
    if ( r != null ) {
      retval = (int) r.getInteger( 0, 0L );
    }

    return retval;
  }

  /**
   * Counts the attribute rows stored for one step.
   */
  public synchronized int getNrStepAttributes( ObjectId id_step ) throws KettleException {
    int retval = 0;

    RowMetaAndData par = repository.connectionDelegate.getParameterMetaData( id_step );
    String sql =
      "SELECT COUNT(*) FROM "
        + quoteTable( KettleDatabaseRepository.TABLE_R_STEP_ATTRIBUTE ) + " WHERE "
        + quote( KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_ID_STEP ) + " = ? ";
    RowMetaAndData r = repository.connectionDelegate.getOneRow( sql, par.getRowMeta(), par.getData() );
    if ( r != null ) {
      retval = (int) r.getInteger( 0, 0L );
    }

    return retval;
  }

  /**
   * Saves the step's grouped attributes map. Each entry is stored as an
   * attribute whose code is {@code STEP_ATTRIBUTE_PREFIX + groupName + '\t' + key}
   * and whose string value is the attribute value, mirroring the parsing done
   * by {@link #loadStepAttributesMap(ObjectId)}.
   */
  private void saveAttributesMap( ObjectId transformationId, ObjectId stepId,
    Map<String, Map<String, String>> attributesMap ) throws KettleException {
    for ( Map.Entry<String, Map<String, String>> group : attributesMap.entrySet() ) {
      final String groupName = group.getKey();
      for ( Map.Entry<String, String> attribute : group.getValue().entrySet() ) {
        final String key = attribute.getKey();
        final String value = attribute.getValue();
        if ( key != null && value != null ) {
          // BUG FIX: the attribute code must encode the KEY, not the value.
          // It previously wrote prefix + groupName + '\t' + value, so
          // loadStepAttributesMap() could never recover the original keys.
          repository.connectionDelegate.insertStepAttribute( transformationId, stepId, 0, STEP_ATTRIBUTE_PREFIX
            + groupName + '\t' + key, 0, value );
        }
      }
    }
  }

  /**
   * Loads the step's grouped attributes map by scanning the step attribute
   * buffer for codes starting with {@link #STEP_ATTRIBUTE_PREFIX} and splitting
   * the remainder into group name and key at the first tab character.
   */
  private Map<String, Map<String, String>> loadStepAttributesMap( ObjectId stepId ) throws KettleException {
    Map<String, Map<String, String>> attributesMap = new HashMap<String, Map<String, String>>();

    List<Object[]> attributeRows = repository.connectionDelegate.getStepAttributesBuffer();
    RowMetaInterface rowMeta = repository.connectionDelegate.getStepAttributesRowMeta();
    for ( Object[] attributeRow : attributeRows ) {
      String code = rowMeta.getString( attributeRow, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_CODE, null );
      if ( code != null && code.startsWith( STEP_ATTRIBUTE_PREFIX ) ) {
        String value =
          rowMeta.getString( attributeRow, KettleDatabaseRepository.FIELD_STEP_ATTRIBUTE_VALUE_STR, null );
        if ( value != null ) {
          code = code.substring( STEP_ATTRIBUTE_PREFIX.length() );
          int tabIndex = code.indexOf( '\t' );
          if ( tabIndex > 0 ) {
            String groupName = code.substring( 0, tabIndex );
            String key = code.substring( tabIndex + 1 );
            Map<String, String> attributes = attributesMap.get( groupName );
            if ( attributes == null ) {
              attributes = new HashMap<String, String>();
              attributesMap.put( groupName, attributes );
            }
            attributes.put( key, value );
          }
        }
      }
    }

    return attributesMap;
  }
}
| |
package org.ripple.bouncycastle.pqc.math.linearalgebra;
import java.math.BigInteger;
import java.util.Random;
/**
* This class implements an element of the finite field <i>GF(2<sup>n </sup>)</i>.
* It is represented in an optimal normal basis representation and holds the
* pointer <tt>mField</tt> to its corresponding field.
*
* @see GF2nField
* @see GF2nElement
*/
public class GF2nONBElement
extends GF2nElement
{
// /////////////////////////////////////////////////////////////////////
// member variables
// /////////////////////////////////////////////////////////////////////
private static final long[] mBitmask = new long[]{0x0000000000000001L,
0x0000000000000002L, 0x0000000000000004L, 0x0000000000000008L,
0x0000000000000010L, 0x0000000000000020L, 0x0000000000000040L,
0x0000000000000080L, 0x0000000000000100L, 0x0000000000000200L,
0x0000000000000400L, 0x0000000000000800L, 0x0000000000001000L,
0x0000000000002000L, 0x0000000000004000L, 0x0000000000008000L,
0x0000000000010000L, 0x0000000000020000L, 0x0000000000040000L,
0x0000000000080000L, 0x0000000000100000L, 0x0000000000200000L,
0x0000000000400000L, 0x0000000000800000L, 0x0000000001000000L,
0x0000000002000000L, 0x0000000004000000L, 0x0000000008000000L,
0x0000000010000000L, 0x0000000020000000L, 0x0000000040000000L,
0x0000000080000000L, 0x0000000100000000L, 0x0000000200000000L,
0x0000000400000000L, 0x0000000800000000L, 0x0000001000000000L,
0x0000002000000000L, 0x0000004000000000L, 0x0000008000000000L,
0x0000010000000000L, 0x0000020000000000L, 0x0000040000000000L,
0x0000080000000000L, 0x0000100000000000L, 0x0000200000000000L,
0x0000400000000000L, 0x0000800000000000L, 0x0001000000000000L,
0x0002000000000000L, 0x0004000000000000L, 0x0008000000000000L,
0x0010000000000000L, 0x0020000000000000L, 0x0040000000000000L,
0x0080000000000000L, 0x0100000000000000L, 0x0200000000000000L,
0x0400000000000000L, 0x0800000000000000L, 0x1000000000000000L,
0x2000000000000000L, 0x4000000000000000L, 0x8000000000000000L};
private static final long[] mMaxmask = new long[]{0x0000000000000001L,
0x0000000000000003L, 0x0000000000000007L, 0x000000000000000FL,
0x000000000000001FL, 0x000000000000003FL, 0x000000000000007FL,
0x00000000000000FFL, 0x00000000000001FFL, 0x00000000000003FFL,
0x00000000000007FFL, 0x0000000000000FFFL, 0x0000000000001FFFL,
0x0000000000003FFFL, 0x0000000000007FFFL, 0x000000000000FFFFL,
0x000000000001FFFFL, 0x000000000003FFFFL, 0x000000000007FFFFL,
0x00000000000FFFFFL, 0x00000000001FFFFFL, 0x00000000003FFFFFL,
0x00000000007FFFFFL, 0x0000000000FFFFFFL, 0x0000000001FFFFFFL,
0x0000000003FFFFFFL, 0x0000000007FFFFFFL, 0x000000000FFFFFFFL,
0x000000001FFFFFFFL, 0x000000003FFFFFFFL, 0x000000007FFFFFFFL,
0x00000000FFFFFFFFL, 0x00000001FFFFFFFFL, 0x00000003FFFFFFFFL,
0x00000007FFFFFFFFL, 0x0000000FFFFFFFFFL, 0x0000001FFFFFFFFFL,
0x0000003FFFFFFFFFL, 0x0000007FFFFFFFFFL, 0x000000FFFFFFFFFFL,
0x000001FFFFFFFFFFL, 0x000003FFFFFFFFFFL, 0x000007FFFFFFFFFFL,
0x00000FFFFFFFFFFFL, 0x00001FFFFFFFFFFFL, 0x00003FFFFFFFFFFFL,
0x00007FFFFFFFFFFFL, 0x0000FFFFFFFFFFFFL, 0x0001FFFFFFFFFFFFL,
0x0003FFFFFFFFFFFFL, 0x0007FFFFFFFFFFFFL, 0x000FFFFFFFFFFFFFL,
0x001FFFFFFFFFFFFFL, 0x003FFFFFFFFFFFFFL, 0x007FFFFFFFFFFFFFL,
0x00FFFFFFFFFFFFFFL, 0x01FFFFFFFFFFFFFFL, 0x03FFFFFFFFFFFFFFL,
0x07FFFFFFFFFFFFFFL, 0x0FFFFFFFFFFFFFFFL, 0x1FFFFFFFFFFFFFFFL,
0x3FFFFFFFFFFFFFFFL, 0x7FFFFFFFFFFFFFFFL, 0xFFFFFFFFFFFFFFFFL};
// mIBy64[j * 16 + i] = (j * 16 + i)/64
// i =
// 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
//
private static final int[] mIBY64 = new int[]{
// j =
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 1
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 2
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 3
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 4
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 5
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 6
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 7
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // 8
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // 9
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // 10
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // 11
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // 12
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // 13
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // 14
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // 15
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, // 16
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, // 17
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, // 18
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, // 19
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, // 20
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, // 21
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, // 22
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5 // 23
};
private static final int MAXLONG = 64;
/**
* holds the lenght of the polynomial with 64 bit sized fields.
*/
private int mLength;
/**
* holds the value of mDeg % MAXLONG.
*/
private int mBit;
/**
* holds this element in ONB representation.
*/
private long[] mPol;
// /////////////////////////////////////////////////////////////////////
// constructors
// /////////////////////////////////////////////////////////////////////
    /**
     * Construct a random element over the field <tt>gf2n</tt>, using the
     * specified source of randomness.
     *
     * @param gf2n the field
     * @param rand the source of randomness
     */
    public GF2nONBElement(GF2nONBField gf2n, Random rand)
    {
        mField = gf2n;
        mDegree = mField.getDegree();
        mLength = gf2n.getONBLength();
        mBit = gf2n.getONBBit();
        mPol = new long[mLength];
        if (mLength > 1)
        {
            // fill all full 64-bit words with random bits
            for (int j = 0; j < mLength - 1; j++)
            {
                mPol[j] = rand.nextLong();
            }
            // the last word only carries mBit significant bits; discard the rest
            long last = rand.nextLong();
            mPol[mLength - 1] = last >>> (MAXLONG - mBit);
        }
        else
        {
            // single-word element: keep only the mBit low-order bits
            mPol[0] = rand.nextLong();
            mPol[0] = mPol[0] >>> (MAXLONG - mBit);
        }
    }
    /**
     * Construct a new GF2nONBElement from its encoding.
     *
     * @param gf2n the field
     * @param e the encoded element (byte array with most significant byte
     *            first, as produced by {@link #toByteArray()})
     */
    public GF2nONBElement(GF2nONBField gf2n, byte[] e)
    {
        mField = gf2n;
        mDegree = mField.getDegree();
        mLength = gf2n.getONBLength();
        mBit = gf2n.getONBBit();
        mPol = new long[mLength];
        // decode the byte array into the 64-bit coefficient words
        assign(e);
    }
    /**
     * Construct the element of the field <tt>gf2n</tt> with the specified
     * value <tt>val</tt>.
     *
     * @param gf2n the field
     * @param val the value represented by a BigInteger
     */
    public GF2nONBElement(GF2nONBField gf2n, BigInteger val)
    {
        mField = gf2n;
        mDegree = mField.getDegree();
        mLength = gf2n.getONBLength();
        mBit = gf2n.getONBBit();
        mPol = new long[mLength];
        // decodes via the BigInteger's big-endian two's-complement byte array
        assign(val);
    }
    /**
     * Construct the element of the field <tt>gf2n</tt> with the specified
     * value <tt>val</tt>.
     *
     * @param gf2n the field
     * @param val the value in ONB representation; the array is NOT copied,
     *            so the caller must hand over ownership
     */
    private GF2nONBElement(GF2nONBField gf2n, long[] val)
    {
        mField = gf2n;
        mDegree = mField.getDegree();
        mLength = gf2n.getONBLength();
        mBit = gf2n.getONBBit();
        mPol = val;
    }
// /////////////////////////////////////////////////////////////////////
// pseudo-constructors
// /////////////////////////////////////////////////////////////////////
    /**
     * Copy constructor.
     *
     * @param gf2n the element to copy
     */
    public GF2nONBElement(GF2nONBElement gf2n)
    {
        mField = gf2n.mField;
        mDegree = mField.getDegree();
        mLength = ((GF2nONBField)mField).getONBLength();
        mBit = ((GF2nONBField)mField).getONBBit();
        mPol = new long[mLength];
        // getElement() returns a copy, so the two elements share no state
        assign(gf2n.getElement());
    }
    /**
     * Create a new GF2nONBElement by cloning this GF2nONBElement.
     *
     * @return a copy of this element
     */
    public Object clone()
    {
        return new GF2nONBElement(this);
    }
/**
* Create the zero element.
*
* @param gf2n the finite field
* @return the zero element in the given finite field
*/
public static GF2nONBElement ZERO(GF2nONBField gf2n)
{
long[] polynomial = new long[gf2n.getONBLength()];
return new GF2nONBElement(gf2n, polynomial);
}
/**
* Create the one element.
*
* @param gf2n the finite field
* @return the one element in the given finite field
*/
public static GF2nONBElement ONE(GF2nONBField gf2n)
{
int mLength = gf2n.getONBLength();
long[] polynomial = new long[mLength];
// fill mDegree coefficients with one's
for (int i = 0; i < mLength - 1; i++)
{
polynomial[i] = 0xffffffffffffffffL;
}
polynomial[mLength - 1] = mMaxmask[gf2n.getONBBit() - 1];
return new GF2nONBElement(gf2n, polynomial);
}
// /////////////////////////////////////////////////////////////////////
// assignments
// /////////////////////////////////////////////////////////////////////
    /**
     * assigns to this element the zero element
     */
    void assignZero()
    {
        // a fresh array is all-zero; the old coefficients are dropped
        mPol = new long[mLength];
    }
/**
* assigns to this element the one element
*/
void assignOne()
{
// fill mDegree coefficients with one's
for (int i = 0; i < mLength - 1; i++)
{
mPol[i] = 0xffffffffffffffffL;
}
mPol[mLength - 1] = mMaxmask[mBit - 1];
}
    /**
     * assigns to this element the value <tt>val</tt>.
     *
     * @param val the value represented by a BigInteger; its big-endian
     *            two's-complement byte encoding is decoded (a negative
     *            value's sign byte is not treated specially)
     */
    private void assign(BigInteger val)
    {
        assign(val.toByteArray());
    }
    /**
     * assigns to this element the value <tt>val</tt>.
     *
     * @param val the value in ONB representation; the first mLength words
     *            are copied into this element
     */
    private void assign(long[] val)
    {
        System.arraycopy(val, 0, mPol, 0, mLength);
    }
    /**
     * assigns to this element the value <tt>val</tt>. First: inverting the
     * order of val into reversed[]. That means: reversed[0] = val[length - 1],
     * ..., reversed[reversed.length - 1] = val[0]. Second: mPol[0] = sum{i = 0,
     * ... 7} (val[i]<<(i*8)) .... mPol[1] = sum{i = 8, ... 15} (val[i]<<(i*8))
     *
     * @param val the value in ONB representation
     */
    private void assign(byte[] val)
    {
        int j;
        mPol = new long[mLength];
        // NOTE(review): assumes val.length <= 8 * mLength; a longer input
        // would index past the end of mPol -- confirm callers guarantee this
        for (j = 0; j < val.length; j++)
        {
            // big-endian input: byte j from the end becomes byte (j % 8) of
            // word (j / 8)
            mPol[j >>> 3] |= (val[val.length - 1 - j] & 0x00000000000000ffL) << ((j & 0x07) << 3);
        }
    }
// /////////////////////////////////////////////////////////////////
// comparison
// /////////////////////////////////////////////////////////////////
/**
* Checks whether this element is zero.
*
* @return <tt>true</tt> if <tt>this</tt> is the zero element
*/
public boolean isZero()
{
boolean result = true;
for (int i = 0; i < mLength && result; i++)
{
result = result && ((mPol[i] & 0xFFFFFFFFFFFFFFFFL) == 0);
}
return result;
}
/**
* Checks whether this element is one.
*
* @return <tt>true</tt> if <tt>this</tt> is the one element
*/
public boolean isOne()
{
boolean result = true;
for (int i = 0; i < mLength - 1 && result; i++)
{
result = result
&& ((mPol[i] & 0xFFFFFFFFFFFFFFFFL) == 0xFFFFFFFFFFFFFFFFL);
}
if (result)
{
result = result
&& ((mPol[mLength - 1] & mMaxmask[mBit - 1]) == mMaxmask[mBit - 1]);
}
return result;
}
/**
* Compare this element with another object.
*
* @param other the other object
* @return <tt>true</tt> if the two objects are equal, <tt>false</tt>
* otherwise
*/
public boolean equals(Object other)
{
if (other == null || !(other instanceof GF2nONBElement))
{
return false;
}
GF2nONBElement otherElem = (GF2nONBElement)other;
for (int i = 0; i < mLength; i++)
{
if (mPol[i] != otherElem.mPol[i])
{
return false;
}
}
return true;
}
/**
* @return the hash code of this element
*/
public int hashCode()
{
return mPol.hashCode();
}
// /////////////////////////////////////////////////////////////////////
// access
// /////////////////////////////////////////////////////////////////////
    /**
     * Returns whether the highest bit of the bit representation is set
     *
     * @return true, if the highest bit of mPol is set, false, otherwise
     */
    public boolean testRightmostBit()
    {
        // due to the reverse bit order (compared to 1363) this method returns
        // the value of the leftmost bit; bit mBit - 1 is the highest used
        // bit of the last word
        return (mPol[mLength - 1] & mBitmask[mBit - 1]) != 0L;
    }
    /**
     * Checks whether the indexed bit of the bit representation is set. Warning:
     * GF2nONBElement currently stores its bits in reverse order (compared to
     * 1363) !!!
     *
     * @param index the index of the bit to test
     * @return <tt>true</tt> if the indexed bit of mPol is set, <tt>false</tt>
     *         otherwise.
     */
    boolean testBit(int index)
    {
        // NOTE(review): the guard admits index == mDegree although only
        // mDegree bits are stored -- confirm this is intended
        if (index < 0 || index > mDegree)
        {
            return false;
        }
        // word = index / 64, bit = index % 64
        long test = mPol[index >>> 6] & mBitmask[index & 0x3f];
        return test != 0x0L;
    }
/**
* @return this element in its ONB representation
*/
private long[] getElement()
{
long[] result = new long[mPol.length];
System.arraycopy(mPol, 0, result, 0, mPol.length);
return result;
}
    /**
     * Returns the ONB representation of this element. The Bit-Order is
     * exchanged (according to 1363)!
     *
     * @return this element in its representation and reverse bit-order
     */
    private long[] getElementReverseOrder()
    {
        long[] result = new long[mPol.length];
        // bit i of the result mirrors bit (mDegree - 1 - i) of this element
        for (int i = 0; i < mDegree; i++)
        {
            if (testBit(mDegree - i - 1))
            {
                result[i >>> 6] |= mBitmask[i & 0x3f];
            }
        }
        return result;
    }
    /**
     * Reverses the bit-order in this element (according to 1363). This is a
     * hack!
     */
    void reverseOrder()
    {
        // replaces the coefficient words with their bit-reversed counterpart
        mPol = getElementReverseOrder();
    }
// /////////////////////////////////////////////////////////////////////
// arithmetic
// /////////////////////////////////////////////////////////////////////
/**
* Compute the sum of this element and <tt>addend</tt>.
*
* @param addend the addend
* @return <tt>this + other</tt> (newly created)
* @throws DifferentFieldsException if the elements are of different fields.
*/
public GFElement add(GFElement addend)
throws RuntimeException
{
GF2nONBElement result = new GF2nONBElement(this);
result.addToThis(addend);
return result;
}
/**
* Compute <tt>this + addend</tt> (overwrite <tt>this</tt>).
*
* @param addend the addend
* @throws DifferentFieldsException if the elements are of different fields.
*/
public void addToThis(GFElement addend)
throws RuntimeException
{
if (!(addend instanceof GF2nONBElement))
{
throw new RuntimeException();
}
if (!mField.equals(((GF2nONBElement)addend).mField))
{
throw new RuntimeException();
}
for (int i = 0; i < mLength; i++)
{
mPol[i] ^= ((GF2nONBElement)addend).mPol[i];
}
}
/**
* returns <tt>this</tt> element + 1.
*
* @return <tt>this</tt> + 1
*/
public GF2nElement increase()
{
GF2nONBElement result = new GF2nONBElement(this);
result.increaseThis();
return result;
}
    /**
     * increases <tt>this</tt> element.
     */
    public void increaseThis()
    {
        // adding the one element XORs every coefficient with 1
        addToThis(ONE((GF2nONBField)mField));
    }
/**
* Compute the product of this element and <tt>factor</tt>.
*
* @param factor the factor
* @return <tt>this * factor</tt> (newly created)
* @throws DifferentFieldsException if the elements are of different fields.
*/
public GFElement multiply(GFElement factor)
throws RuntimeException
{
GF2nONBElement result = new GF2nONBElement(this);
result.multiplyThisBy(factor);
return result;
}
    /**
     * Compute <tt>this * factor</tt> (overwrite <tt>this</tt>).
     *
     * @param factor the factor
     * @throws RuntimeException if <tt>factor</tt> is not a GF2nONBElement or
     *             the elements are of different fields.
     */
    public void multiplyThisBy(GFElement factor)
        throws RuntimeException
    {
        if (!(factor instanceof GF2nONBElement))
        {
            throw new RuntimeException("The elements have different"
                + " representation: not yet" + " implemented");
        }
        if (!mField.equals(((GF2nONBElement)factor).mField))
        {
            throw new RuntimeException();
        }
        if (equals(factor))
        {
            // a * a is cheaper as a squaring
            squareThis();
        }
        else
        {
            // NOTE: a and b alias the internal arrays of this and factor;
            // both are circularly shifted in place below. After the mDegree
            // single-bit shifts each array should be back at its original
            // value, but the factor is temporarily modified while the
            // computation runs (not safe for concurrent readers).
            long[] a = mPol;
            long[] b = ((GF2nONBElement)factor).mPol;
            long[] c = new long[mLength];
            // presumably the field's precomputed multiplication table;
            // each row holds up to two bit indices, -1 meaning "absent"
            int[][] m = ((GF2nONBField)mField).mMult;
            int degf, degb, s, fielda, fieldb, bita, bitb;
            degf = mLength - 1;
            degb = mBit - 1;
            s = 0;
            long TWOTOMAXLONGM1 = mBitmask[MAXLONG - 1];
            long TWOTODEGB = mBitmask[degb];
            boolean old, now;
            // the product c of a and b (a*b = c) is calculated in mDegree
            // cycles; in every cycle one coefficient of c is calculated and
            // stored; k indicates the coefficient
            //
            for (int k = 0; k < mDegree; k++)
            {
                s = 0;
                for (int i = 0; i < mDegree; i++)
                {
                    // fielda = i / MAXLONG
                    //
                    fielda = mIBY64[i];
                    // bita = i % MAXLONG
                    //
                    bita = i & (MAXLONG - 1);
                    // fieldb = m[i][0] / MAXLONG
                    //
                    fieldb = mIBY64[m[i][0]];
                    // bitb = m[i][0] % MAXLONG
                    //
                    bitb = m[i][0] & (MAXLONG - 1);
                    if ((a[fielda] & mBitmask[bita]) != 0)
                    {
                        if ((b[fieldb] & mBitmask[bitb]) != 0)
                        {
                            s ^= 1;
                        }
                        if (m[i][1] != -1)
                        {
                            // fieldb = m[i][1] / MAXLONG
                            //
                            fieldb = mIBY64[m[i][1]];
                            // bitb = m[i][1] % MAXLONG
                            //
                            bitb = m[i][1] & (MAXLONG - 1);
                            if ((b[fieldb] & mBitmask[bitb]) != 0)
                            {
                                s ^= 1;
                            }
                        }
                    }
                }
                fielda = mIBY64[k];
                bita = k & (MAXLONG - 1);
                if (s != 0)
                {
                    c[fielda] ^= mBitmask[bita];
                }
                // Circular shift of x and y one bit to the right,
                // respectively.
                if (mLength > 1)
                {
                    // Shift x.
                    //
                    old = (a[degf] & 1) == 1;
                    for (int i = degf - 1; i >= 0; i--)
                    {
                        now = (a[i] & 1) != 0;
                        a[i] = a[i] >>> 1;
                        if (old)
                        {
                            a[i] ^= TWOTOMAXLONGM1;
                        }
                        old = now;
                    }
                    a[degf] = a[degf] >>> 1;
                    if (old)
                    {
                        a[degf] ^= TWOTODEGB;
                    }
                    // Shift y.
                    //
                    old = (b[degf] & 1) == 1;
                    for (int i = degf - 1; i >= 0; i--)
                    {
                        now = (b[i] & 1) != 0;
                        b[i] = b[i] >>> 1;
                        if (old)
                        {
                            b[i] ^= TWOTOMAXLONGM1;
                        }
                        old = now;
                    }
                    b[degf] = b[degf] >>> 1;
                    if (old)
                    {
                        b[degf] ^= TWOTODEGB;
                    }
                }
                else
                {
                    // single-word case: the circular shift wraps inside one word
                    old = (a[0] & 1) == 1;
                    a[0] = a[0] >>> 1;
                    if (old)
                    {
                        a[0] ^= TWOTODEGB;
                    }
                    old = (b[0] & 1) == 1;
                    b[0] = b[0] >>> 1;
                    if (old)
                    {
                        b[0] ^= TWOTODEGB;
                    }
                }
            }
            assign(c);
        }
    }
/**
* returns <tt>this</tt> element to the power of 2.
*
* @return <tt>this</tt><sup>2</sup>
*/
public GF2nElement square()
{
GF2nONBElement result = new GF2nONBElement(this);
result.squareThis();
return result;
}
    /**
     * squares <tt>this</tt> element. In an optimal normal basis squaring is a
     * circular left shift of the coefficient vector.
     */
    public void squareThis()
    {
        long[] pol = getElement();
        int f = mLength - 1;
        int b = mBit - 1;
        // Shift the coefficients one bit to the left.
        //
        long TWOTOMAXLONGM1 = mBitmask[MAXLONG - 1];
        boolean old, now;
        // old carries the bit shifted out of the previous word; it is seeded
        // with the top used bit of the last word, which wraps to bit 0
        old = (pol[f] & mBitmask[b]) != 0;
        for (int i = 0; i < f; i++)
        {
            now = (pol[i] & TWOTOMAXLONGM1) != 0;
            pol[i] = pol[i] << 1;
            if (old)
            {
                pol[i] ^= 1;
            }
            old = now;
        }
        now = (pol[f] & mBitmask[b]) != 0;
        pol[f] = pol[f] << 1;
        if (old)
        {
            pol[f] ^= 1;
        }
        // Set the bit with index mDegree to zero.
        //
        if (now)
        {
            pol[f] ^= mBitmask[b + 1];
        }
        assign(pol);
    }
/**
* Compute the multiplicative inverse of this element.
*
* @return <tt>this<sup>-1</sup></tt> (newly created)
* @throws ArithmeticException if <tt>this</tt> is the zero element.
*/
public GFElement invert()
throws ArithmeticException
{
GF2nONBElement result = new GF2nONBElement(this);
result.invertThis();
return result;
}
/**
* Multiplicatively invert of this element (overwrite <tt>this</tt>).
*
* @throws ArithmeticException if <tt>this</tt> is the zero element.
*/
public void invertThis()
throws ArithmeticException
{
if (isZero())
{
throw new ArithmeticException();
}
int r = 31; // mDegree kann nur 31 Bits lang sein!!!
// Bitlaenge von mDegree:
for (boolean found = false; !found && r >= 0; r--)
{
if (((mDegree - 1) & mBitmask[r]) != 0)
{
found = true;
}
}
r++;
GF2nElement m = ZERO((GF2nONBField)mField);
GF2nElement n = new GF2nONBElement(this);
int k = 1;
for (int i = r - 1; i >= 0; i--)
{
m = (GF2nElement)n.clone();
for (int j = 1; j <= k; j++)
{
m.squareThis();
}
n.multiplyThisBy(m);
k <<= 1;
if (((mDegree - 1) & mBitmask[i]) != 0)
{
n.squareThis();
n.multiplyThisBy(this);
k++;
}
}
n.squareThis();
}
/**
* returns the root of<tt>this</tt> element.
*
* @return <tt>this</tt><sup>1/2</sup>
*/
public GF2nElement squareRoot()
{
GF2nONBElement result = new GF2nONBElement(this);
result.squareRootThis();
return result;
}
    /**
     * square roots <tt>this</tt> element. In an optimal normal basis this is
     * a circular right shift of the coefficient vector (the inverse of
     * {@link #squareThis()}).
     */
    public void squareRootThis()
    {
        long[] pol = getElement();
        int f = mLength - 1;
        int b = mBit - 1;
        // Shift the coefficients one bit to the right.
        //
        long TWOTOMAXLONGM1 = mBitmask[MAXLONG - 1];
        boolean old, now;
        // old is seeded with the lowest bit, which wraps around to the top
        old = (pol[0] & 1) != 0;
        for (int i = f; i >= 0; i--)
        {
            now = (pol[i] & 1) != 0;
            pol[i] = pol[i] >>> 1;
            if (old)
            {
                if (i == f)
                {
                    // the last word's highest used bit is bit b
                    pol[i] ^= mBitmask[b];
                }
                else
                {
                    pol[i] ^= TWOTOMAXLONGM1;
                }
            }
            old = now;
        }
        assign(pol);
    }
/**
* Returns the trace of this element.
*
* @return the trace of this element
*/
public int trace()
{
// trace = sum of coefficients
//
int result = 0;
int max = mLength - 1;
for (int i = 0; i < max; i++)
{
for (int j = 0; j < MAXLONG; j++)
{
if ((mPol[i] & mBitmask[j]) != 0)
{
result ^= 1;
}
}
}
int b = mBit;
for (int j = 0; j < b; j++)
{
if ((mPol[max] & mBitmask[j]) != 0)
{
result ^= 1;
}
}
return result;
}
    /**
     * Solves a quadratic equation.<br>
     * Let z<sup>2</sup> + z = <tt>this</tt>. Then this method returns z.
     *
     * @return z with z<sup>2</sup> + z = <tt>this</tt>
     * @throws RuntimeException if z<sup>2</sup> + z = <tt>this</tt> does not have a
     *             solution (i.e. this element's trace is 1)
     */
    public GF2nElement solveQuadraticEquation()
        throws RuntimeException
    {
        if (trace() == 1)
        {
            // a solution exists only for elements of trace 0
            throw new RuntimeException();
        }
        long TWOTOMAXLONGM1 = mBitmask[MAXLONG - 1];
        long ZERO = 0L;
        long ONE = 1L;
        long[] p = new long[mLength];
        long z = 0L;
        int j = 1;
        for (int i = 0; i < mLength - 1; i++)
        {
            for (j = 1; j < MAXLONG; j++)
            {
                // recurrence z_j = z_{j-1} XOR this_j: flip bit j of z
                // whenever this_j differs from z_{j-1}
                if (!((((mBitmask[j] & mPol[i]) != ZERO) && ((z & mBitmask[j - 1]) != ZERO)) || (((mPol[i] & mBitmask[j]) == ZERO) && ((z & mBitmask[j - 1]) == ZERO))))
                {
                    z ^= mBitmask[j];
                }
            }
            p[i] = z;
            // carry the recurrence across the 64-bit word boundary
            if (((TWOTOMAXLONGM1 & z) != ZERO && (ONE & mPol[i + 1]) == ONE)
                || ((TWOTOMAXLONGM1 & z) == ZERO && (ONE & mPol[i + 1]) == ZERO))
            {
                z = ZERO;
            }
            else
            {
                z = ONE;
            }
        }
        int b = mDegree & (MAXLONG - 1);
        long LASTLONG = mPol[mLength - 1];
        // same recurrence for the partial last word
        for (j = 1; j < b; j++)
        {
            if (!((((mBitmask[j] & LASTLONG) != ZERO) && ((mBitmask[j - 1] & z) != ZERO)) || (((mBitmask[j] & LASTLONG) == ZERO) && ((mBitmask[j - 1] & z) == ZERO))))
            {
                z ^= mBitmask[j];
            }
        }
        p[mLength - 1] = z;
        return new GF2nONBElement((GF2nONBField)mField, p);
    }
// /////////////////////////////////////////////////////////////////
// conversion
// /////////////////////////////////////////////////////////////////
    /**
     * Returns a String representation of this element.
     *
     * @return String representation of this element with radix 16
     */
    public String toString()
    {
        return toString(16);
    }
/**
* Returns a String representation of this element. <tt>radix</tt>
* specifies the radix of the String representation.<br>
* NOTE: ONLY <tt>radix = 2</tt> or <tt>radix = 16</tt> IS IMPLEMENTED
*
* @param radix specifies the radix of the String representation
* @return String representation of this element with the specified radix
*/
public String toString(int radix)
{
String s = "";
long[] a = getElement();
int b = mBit;
if (radix == 2)
{
for (int j = b - 1; j >= 0; j--)
{
if ((a[a.length - 1] & ((long)1 << j)) == 0)
{
s += "0";
}
else
{
s += "1";
}
}
for (int i = a.length - 2; i >= 0; i--)
{
for (int j = MAXLONG - 1; j >= 0; j--)
{
if ((a[i] & mBitmask[j]) == 0)
{
s += "0";
}
else
{
s += "1";
}
}
}
}
else if (radix == 16)
{
final char[] HEX_CHARS = {'0', '1', '2', '3', '4', '5', '6', '7',
'8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
for (int i = a.length - 1; i >= 0; i--)
{
s += HEX_CHARS[(int)(a[i] >>> 60) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 56) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 52) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 48) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 44) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 40) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 36) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 32) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 28) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 24) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 20) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 16) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 12) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 8) & 0x0f];
s += HEX_CHARS[(int)(a[i] >>> 4) & 0x0f];
s += HEX_CHARS[(int)(a[i]) & 0x0f];
s += " ";
}
}
return s;
}
    /**
     * Returns this element as FlexiBigInt. The conversion is <a href =
     * "http://grouper.ieee.org/groups/1363/">P1363</a>-conform.
     *
     * @return this element as BigInteger (always non-negative, since the
     *         signum-1 constructor is used)
     */
    public BigInteger toFlexiBigInt()
    {
        /** @todo this method does not reverse the bit-order as it should!!! */
        return new BigInteger(1, toByteArray());
    }
    /**
     * Returns this element as byte array. The conversion is <a href =
     * "http://grouper.ieee.org/groups/1363/">P1363</a>-conform.
     *
     * @return this element as byte array of ceil(mDegree / 8) bytes, most
     *         significant byte first
     */
    public byte[] toByteArray()
    {
        /** @todo this method does not reverse the bit-order as it should!!! */
        int k = ((mDegree - 1) >> 3) + 1;
        byte[] result = new byte[k];
        int i;
        for (i = 0; i < k; i++)
        {
            // byte i (counted from the low end) is byte (i % 8) of word
            // (i / 8); the output array is filled back to front
            result[k - i - 1] = (byte)((mPol[i >>> 3] & (0x00000000000000ffL << ((i & 0x07) << 3))) >>> ((i & 0x07) << 3));
        }
        return result;
    }
}
| |
package com.oldterns.vilebot.handlers.user;
import com.oldterns.vilebot.Vilebot;
import com.oldterns.vilebot.db.ChurchDB;
import com.oldterns.vilebot.db.GroupDB;
import com.oldterns.vilebot.db.KarmaDB;
import com.oldterns.vilebot.util.BaseNick;
import com.oldterns.vilebot.util.Ignore;
import com.oldterns.vilebot.util.Sessions;
import org.pircbotx.hooks.ListenerAdapter;
import org.pircbotx.hooks.events.JoinEvent;
import org.pircbotx.hooks.events.MessageEvent;
import org.pircbotx.hooks.events.PrivateMessageEvent;
import org.pircbotx.hooks.types.GenericMessageEvent;
import org.pircbotx.output.OutputIRC;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.Random;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class Karma
extends ListenerAdapter
{
    // A noun is any run of non-whitespace; each "blob" pattern matches one
    // noun (optionally suffixed with "++", "--" or "+-") plus its trailing
    // separator. Note: concatenating a Pattern into a String embeds its
    // source regex text.
    private static final Pattern nounPattern = Pattern.compile( "\\S+" );
    private static final Pattern nickBlobPattern = Pattern.compile( "(?:(" + nounPattern + "?)(?:, +| +|$))" );
    private static final Pattern incBlobPattern = Pattern.compile( "(?:(" + nounPattern + "?\\+\\+)(?:, +| +|$))" );
    private static final Pattern decBlobPattern = Pattern.compile( "(?:(" + nounPattern + "?--)(?:, +| +|$))" );
    private static final Pattern incOrDecBlobPattern = Pattern.compile( "(?:(" + nounPattern + "?\\+-)(?:, +| +|$))" );
    private static final Pattern incrementPattern = Pattern.compile( "(?:^|^.*\\s+)(" + incBlobPattern + "+)(?:.*|$)" );
    private static final Pattern decrementPattern = Pattern.compile( "(?:^|^.*\\s+)(" + decBlobPattern + "+)(?:.*|$)" );
    private static final Pattern incOrDecPattern =
        Pattern.compile( "(?:^|^.*\\s+)(" + incOrDecBlobPattern + "+)(?:.*|$)" );
    // The opening (?:^|^.*\\s+) and closing (?:.*|$) are needed when only part of the message is ++ or -- events
    private static final Pattern selfKarmaQueryPattern = Pattern.compile( "^\\s*!(rev|)rank\\s*$" );
    private static final Pattern karmaQueryPattern = Pattern.compile( "!(rev|)rank (" + nickBlobPattern + "+)" );
    private static final Pattern ranknPattern = Pattern.compile( "!(rev|)rankn ([0-9]+)\\s*" );
    private static final Pattern topBottomThreePattern = Pattern.compile( "!(top|bottom)three\\s*" );
    private static final Pattern removePattern = Pattern.compile( "!admin unrank (" + nounPattern + ")\\s*" );
    private static final Pattern totalPattern = Pattern.compile( "^!total" );
    /**
     * Announces the joining user's rank and karma (with title) unless the
     * nick is on the on-join ignore list.
     */
    @Override
    public void onJoin( JoinEvent event ) // announce karma on join
    {
        String noun = BaseNick.toBaseNick( Objects.requireNonNull( event.getUser() ).getNick() );
        OutputIRC outputQ = event.getBot().send();
        String replyTarget = event.getChannel().getName();
        if ( !Ignore.getOnJoin().contains( noun ) )
            replyWithRankAndKarma( noun, outputQ, replyTarget, false, false, true );
    }
    /**
     * Dispatches an incoming message to every karma command whose pattern
     * matches. The checks are independent, so one message can trigger
     * several handlers.
     */
    @Override
    public void onGenericMessage( final GenericMessageEvent event )
    {
        String text = event.getMessage();
        OutputIRC outputQ = event.getBot().send();
        String replyTarget;
        // reply privately to private messages, otherwise into the channel
        if ( event instanceof PrivateMessageEvent )
            replyTarget = event.getUser().getNick();
        else if ( event instanceof MessageEvent )
            replyTarget = ( (MessageEvent) event ).getChannel().getName();
        else
            return;
        Matcher incMatcher = incrementPattern.matcher( text );
        Matcher decMatcher = decrementPattern.matcher( text );
        Matcher incOrDecMatcher = incOrDecPattern.matcher( text );
        Matcher specificMatcher = karmaQueryPattern.matcher( text );
        Matcher selfMatcher = selfKarmaQueryPattern.matcher( text );
        Matcher rankNumberMatcher = ranknPattern.matcher( text );
        Matcher totalKarmaMatcher = totalPattern.matcher( text );
        Matcher topBottomThreeMatcher = topBottomThreePattern.matcher( text );
        Matcher unrankMatcher = removePattern.matcher( text );
        if ( incMatcher.matches() )
            karmaInc( event, incMatcher );
        if ( decMatcher.matches() )
            karmaDec( event, decMatcher );
        if ( incOrDecMatcher.matches() )
            karmaIncOrDec( event, incOrDecMatcher );
        if ( specificMatcher.matches() )
            specificKarmaQuery( event, outputQ, replyTarget, specificMatcher );
        if ( selfMatcher.matches() )
            selfKarmaQuery( event, outputQ, replyTarget, selfMatcher );
        if ( rankNumberMatcher.matches() )
            rankNumber( event, outputQ, replyTarget, rankNumberMatcher );
        if ( totalKarmaMatcher.matches() )
            totalKarma( event );
        if ( topBottomThreeMatcher.matches() )
            topBottomThree( event, outputQ, replyTarget, topBottomThreeMatcher );
        if ( unrankMatcher.matches() )
            unrank( event, outputQ, replyTarget, unrankMatcher );
    }
    /**
     * Handles "nick++" increments: +1 karma to every distinct nick in the
     * message other than the sender.
     */
    private void karmaInc( GenericMessageEvent event, Matcher incMatcher )
    {
        if ( isPrivate( event ) )
        {
            // NOTE(review): attempting to give karma in private costs the
            // sender one karma -- looks like an intentional penalty; confirm
            KarmaDB.modNounKarma( Objects.requireNonNull( event.getUser() ).getNick(), -1 );
            return;
        }
        // Prevent users from increasing karma outside of #TheFoobar
        if ( !( (MessageEvent) event ).getChannel().getName().equals( Vilebot.getConfig().get( "ircChannel1" ) ) )
        {
            event.respondWith( "You must be in " + Vilebot.getConfig().get( "ircChannel1" )
                + " to give or receive karma." );
            return;
        }
        // If one match is found, take the entire text of the message (group(0)) and check each word
        // This is needed in the case that only part of the message is karma events (ie "wow anestico++")
        String wordBlob = incMatcher.group( 0 );
        String sender = BaseNick.toBaseNick( Objects.requireNonNull( event.getUser() ).getNick() );
        Set<String> nicks = new HashSet<>();
        Matcher nickMatcher = incBlobPattern.matcher( wordBlob );
        while ( nickMatcher.find() )
        {
            nicks.add( BaseNick.toBaseNick( nickMatcher.group( 1 ) ) );
        }
        boolean insult = false;
        for ( String nick : nicks )
        {
            if ( !nick.equals( sender ) )
                KarmaDB.modNounKarma( nick, 1 );
            else
                insult = true;
        }
        if ( insult )
            // TODO insult generator?
            event.respondWith( "I think I'm supposed to insult you now." );
    }
    /**
     * Handles "nick--" decrements: -1 karma to every listed nick except the
     * bot itself.
     */
    private void karmaDec( GenericMessageEvent event, Matcher decMatcher )
    {
        if ( isPrivate( event ) )
        {
            // private karma attempts cost the sender one karma
            KarmaDB.modNounKarma( Objects.requireNonNull( event.getUser() ).getNick(), -1 );
            return;
        }
        // Prevent users from decreasing karma outside of #TheFoobar
        if ( !( (MessageEvent) event ).getChannel().getName().equals( Vilebot.getConfig().get( "ircChannel1" ) ) )
        {
            event.respondWith( "You must be in " + Vilebot.getConfig().get( "ircChannel1" )
                + " to give or receive karma." );
            return;
        }
        // If one match is found, take the entire text of the message (group(0)) and check each word
        String wordBlob = decMatcher.group( 0 );
        // NOTE(review): unlike karmaInc this collects into a List, so a nick
        // repeated in one message is decremented once per occurrence, and it
        // guards the bot's nick rather than the sender's -- confirm intended
        List<String> nicks = new LinkedList<>();
        Matcher nickMatcher = decBlobPattern.matcher( wordBlob );
        while ( nickMatcher.find() )
        {
            nicks.add( BaseNick.toBaseNick( nickMatcher.group( 1 ) ) );
        }
        boolean insult = false;
        String botNick = event.getBot().getNick();
        for ( String nick : nicks )
        {
            if ( !nick.equals( botNick ) )
                KarmaDB.modNounKarma( nick, -1 );
            else
                insult = true;
        }
        if ( insult )
            // TODO insult generator?
            event.respondWith( "I think I'm supposed to insult you now." );
    }
    /**
     * Handles "nick+-": randomly increments or decrements each distinct nick
     * other than the sender by 1.
     */
    private void karmaIncOrDec( GenericMessageEvent event, Matcher incOrDecMatcher )
    {
        if ( isPrivate( event ) )
        {
            // private karma attempts cost the sender one karma
            KarmaDB.modNounKarma( Objects.requireNonNull( event.getUser() ).getNick(), -1 );
            return;
        }
        // Prevent users from increasing karma outside of #TheFoobar
        if ( !( (MessageEvent) event ).getChannel().getName().equals( Vilebot.getConfig().get( "ircChannel1" ) ) )
        {
            event.respondWith( "You must be in " + Vilebot.getConfig().get( "ircChannel1" )
                + " to give or receive karma." );
            return;
        }
        // If one match is found, take the entire text of the message (group(0)) and check each word
        // This is needed in the case that only part of the message is karma events (ie "wow anestico++")
        String wordBlob = incOrDecMatcher.group( 0 );
        String sender = BaseNick.toBaseNick( Objects.requireNonNull( event.getUser() ).getNick() );
        Set<String> nicks = new HashSet<>();
        Matcher nickMatcher = incOrDecBlobPattern.matcher( wordBlob );
        while ( nickMatcher.find() )
        {
            nicks.add( BaseNick.toBaseNick( nickMatcher.group( 1 ) ) );
        }
        boolean insult = false;
        for ( String nick : nicks )
        {
            if ( !nick.equals( sender ) )
                decideIncOrDec( event, nick );
            else
                insult = true;
        }
        if ( insult )
            // TODO insult generator?
            event.respondWith( "I think I'm supposed to insult you now." );
    }
private void decideIncOrDec( GenericMessageEvent event, String nick )
{
int karma = 0;
Random rand = new Random();
while ( karma == 0 )
{
karma = rand.nextInt( 3 ) - 1;
}
String reply = nick + " had their karma ";
reply += karma == 1 ? "increased" : "decreased";
reply += " by 1";
event.respondWith( reply );
KarmaDB.modNounKarma( nick, karma );
}
    // a message counts as private iff it arrived as a PrivateMessageEvent
    private boolean isPrivate( GenericMessageEvent event )
    {
        return event instanceof PrivateMessageEvent;
    }
private void specificKarmaQuery( GenericMessageEvent event, OutputIRC outputQ, String replyTarget,
Matcher specificMatcher )
{
String mode = specificMatcher.group( 1 );
String nickBlob = specificMatcher.group( 2 );
List<String> nicks = new LinkedList<>();
Matcher nickMatcher = nickBlobPattern.matcher( nickBlob );
while ( nickMatcher.find() )
{
nicks.add( BaseNick.toBaseNick( nickMatcher.group( 1 ) ) );
}
boolean reverse = "rev".equals( mode );
for ( String nick : nicks )
{
if ( !replyWithRankAndKarma( nick, outputQ, replyTarget, reverse ) )
event.respondWith( nick + " has no karma." );
}
}
private void selfKarmaQuery( GenericMessageEvent event, OutputIRC outputQ, String replyTarget, Matcher selfMatcher )
{
String mode = selfMatcher.group( 1 );
String noun = BaseNick.toBaseNick( event.getUser().getNick() );
boolean reverse = "rev".equals( mode );
if ( !replyWithRankAndKarma( noun, outputQ, replyTarget, reverse ) )
event.respondWith( noun + " has no karma." );
}
private void rankNumber( GenericMessageEvent event, OutputIRC outputQ, String replyTarget,
Matcher rankNumberMatcher )
{
String mode = rankNumberMatcher.group( 1 );
String place = rankNumberMatcher.group( 2 );
boolean reverse = "rev".equals( mode );
String noun;
if ( reverse )
noun = KarmaDB.getRevRankNoun( Long.parseLong( place ) );
else
noun = KarmaDB.getRankNoun( Long.parseLong( place ) );
if ( noun != null )
replyWithRankAndKarma( noun, outputQ, replyTarget, reverse );
else
event.respondWith( "No noun at that rank." );
}
private void totalKarma( GenericMessageEvent event )
{
event.respondWith( "" + KarmaDB.getTotalKarma() );
}
/**
 * Reports the top three or bottom three nouns by karma, depending on whether
 * the matched mode is "top" or "bottom". Nicks are obfuscated (reversed) in
 * the replies.
 */
private void topBottomThree( GenericMessageEvent event, OutputIRC outputQ, String replyTarget,
    Matcher topBottomThreeMatcher )
{
    String mode = topBottomThreeMatcher.group( 1 );
    Set<String> nouns = null;
    if ( "top".equals( mode ) )
    {
        nouns = KarmaDB.getRankNouns( 0, 2 );
    }
    else if ( "bottom".equals( mode ) )
    {
        nouns = KarmaDB.getRevRankNouns( 0, 2 );
    }
    // Guard clause: nothing ranked (or an unrecognized mode) gets one reply.
    if ( nouns == null || nouns.isEmpty() )
    {
        event.respondWith( "No nouns at ranks 1 to 3." );
        return;
    }
    for ( String noun : nouns )
    {
        replyWithRankAndKarma( noun, outputQ, replyTarget, false, true );
    }
}
/**
 * Admin-only command: removes all of a noun's karma. Non-admin senders are
 * silently ignored; for admins the current rank is announced before removal.
 */
private void unrank( GenericMessageEvent event, OutputIRC outputQ, String replyTarget, Matcher unrankMatcher )
{
    String username = Sessions.getSession( event.getUser().getNick() );
    if ( !GroupDB.isAdmin( username ) )
    {
        return;
    }
    String noun = BaseNick.toBaseNick( unrankMatcher.group( 1 ) );
    if ( replyWithRankAndKarma( noun, outputQ, replyTarget ) )
    {
        event.respondWith( "Removing " + noun + "." );
        KarmaDB.remNoun( noun );
    }
    else
    {
        event.respondWith( noun + " isn't ranked." );
    }
}
/**
 * Convenience overload: forward ranking, nick shown as-is, no donor title.
 *
 * @return true if the noun had karma and a reply was sent, false otherwise
 */
private static boolean replyWithRankAndKarma( String noun, OutputIRC outputQ, String replyTarget )
{
    return replyWithRankAndKarma( noun, outputQ, replyTarget, false );
}
/**
 * Convenience overload: selectable ranking direction, nick shown as-is, no
 * donor title.
 *
 * @return true if the noun had karma and a reply was sent, false otherwise
 */
private static boolean replyWithRankAndKarma( String noun, OutputIRC outputQ, String replyTarget,
    boolean reverseOrder )
{
    return replyWithRankAndKarma( noun, outputQ, replyTarget, reverseOrder, false );
}
/**
 * Convenience overload: selectable ranking direction and nick obfuscation, no
 * donor title.
 *
 * @return true if the noun had karma and a reply was sent, false otherwise
 */
private static boolean replyWithRankAndKarma( String noun, OutputIRC outputQ, String replyTarget,
    boolean reverseOrder, boolean obfuscateNick )
{
    return replyWithRankAndKarma( noun, outputQ, replyTarget, reverseOrder, obfuscateNick, false );
}
/**
 * Sends a "<noun> is ranked at #N with K points of karma." message to the
 * reply target.
 *
 * @param noun the normalized noun to look up
 * @param outputQ the IRC output queue the reply is written to
 * @param replyTarget the channel or nick receiving the reply
 * @param reverseOrder if true, report the from-the-bottom rank
 * @param obfuscateNick if true, display the noun reversed
 * @param useTitle if true and the noun is a top donor, display its donor title
 * @return true if the noun has karma and a reply was sent, false otherwise
 */
private static boolean replyWithRankAndKarma( String noun, OutputIRC outputQ, String replyTarget,
    boolean reverseOrder, boolean obfuscateNick, boolean useTitle )
{
    Integer nounRank = reverseOrder ? KarmaDB.getNounRevRank( noun ) : KarmaDB.getNounRank( noun );
    Integer nounKarma = KarmaDB.getNounKarma( noun );
    // Swap in the donor title (when requested) only after the DB lookups,
    // which must use the real noun.
    if ( useTitle && ChurchDB.isTopDonor( noun ) )
    {
        String title = ChurchDB.getDonorTitle( noun );
        if ( title.trim().length() > 0 )
        {
            noun = title;
        }
    }
    if ( nounKarma == null )
    {
        return false;
    }
    String display = obfuscateNick ? new StringBuilder( noun ).reverse().toString() : noun;
    StringBuilder msg = new StringBuilder( display );
    msg.append( " is " ).append( "ranked at " );
    if ( reverseOrder )
    {
        msg.append( "(reverse) " );
    }
    msg.append( "#" ).append( nounRank ).append( " with " ).append( nounKarma )
        .append( " points of karma." );
    outputQ.message( replyTarget, msg.toString() );
    return true;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.workflow.util;
//JDK imports
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.Vector;
import java.util.HashMap;
import java.util.logging.Logger;
import org.w3c.dom.Node;
import org.w3c.dom.Text;
import org.w3c.dom.NodeList;
import org.w3c.dom.Element;
//OODT imports
import org.apache.oodt.cas.metadata.Metadata;
import org.apache.oodt.cas.metadata.util.PathUtils;
import org.apache.oodt.cas.workflow.structs.Workflow;
import org.apache.oodt.cas.workflow.structs.WorkflowConditionConfiguration;
import org.apache.oodt.cas.workflow.structs.WorkflowTask;
import org.apache.oodt.cas.workflow.structs.WorkflowTaskConfiguration;
import org.apache.oodt.cas.workflow.structs.WorkflowCondition;
/**
 * A class for constructing Workflow Manager objects from XML {@link Node}s
* and {@link Element}s.
*
* @author mattmann
 * @version $Revision$
*
*/
public final class XmlStructFactory {

  /* our log stream */
  public static Logger LOG = Logger.getLogger(XmlStructFactory.class.getName());

  /** Utility class: never instantiated. */
  private XmlStructFactory() throws InstantiationException {
    throw new InstantiationException("Don't instantiate XML Struct Factories!");
  }

  /**
   * <p>
   * Creates {@link Workflow}s from the XML Node and the map of existing
   * {@link WorkflowTask}s.
   * </p>
   *
   * @param node The XML node to construct the Workflow from.
   * @param tasks The {@link HashMap} of existing {@link WorkflowTask}s, keyed by task id.
   * @param conditions The {@link HashMap} of existing {@link WorkflowCondition}s, keyed by
   *          condition id.
   * @return A new {@link Workflow} created from the XML node.
   */
  public static Workflow getWorkflow(Node node, HashMap tasks,
      HashMap conditions) {
    Element workflowRoot = (Element) node;

    Workflow workflow = new Workflow();
    workflow.setName(workflowRoot.getAttribute("name"));
    workflow.setId(workflowRoot.getAttribute("id"));

    // Tasks and conditions are both optional child elements.
    Element taskElem = getFirstElement("tasks", workflowRoot);
    if (taskElem != null) {
      workflow.setTasks(getTasks(taskElem, tasks));
    }
    Element conditionsElem = getFirstElement("conditions", workflowRoot);
    if (conditionsElem != null) {
      workflow.setConditions(getConditions(conditionsElem, conditions));
    }
    return workflow;
  }

  /**
   * <p>
   * Constructs a new {@link WorkflowTask} from the given XML node and
   * {@link HashMap} of {@link WorkflowCondition}s.
   * </p>
   *
   * @param node The XML node to construct the {@link WorkflowTask} from.
   * @param conditions The {@link HashMap} of {@link WorkflowCondition}s to use when
   *          constructing the WorkflowTask.
   * @return A new {@link WorkflowTask} created from the given XML node.
   */
  public static WorkflowTask getWorkflowTask(Node node, HashMap conditions) {
    Element taskNode = (Element) node;

    WorkflowTask task = new WorkflowTask();
    task.setTaskInstanceClassName(taskNode.getAttribute("class"));
    task.setTaskId(taskNode.getAttribute("id"));
    task.setTaskName(taskNode.getAttribute("name"));

    // Resolve the task's condition id references against the known conditions.
    task.setConditions(getConditions(taskNode, conditions));

    Element reqMetFieldsElem = getFirstElement("requiredMetFields", taskNode);
    if (reqMetFieldsElem != null) {
      task.setRequiredMetFields(getRequiredMetFields(reqMetFieldsElem));
    }

    // Load its static configuration, if present.
    Element configElement = getFirstElement("configuration", taskNode);
    if (configElement != null) {
      task.setTaskConfig(new WorkflowTaskConfiguration(
          getConfiguration(configElement)));
    }
    return task;
  }

  /**
   * <p>
   * Constructs a new {@link WorkflowCondition} from the given XML Node.
   * </p>
   *
   * @param node The XML node to construct the WorkflowCondition from.
   * @return A new {@link WorkflowCondition} from the given XML node.
   */
  public static WorkflowCondition getWorkflowCondition(Node node) {
    Element conditionElement = (Element) node;

    WorkflowCondition condition = new WorkflowCondition();
    condition.setConditionInstanceClassName(conditionElement.getAttribute("class"));
    condition.setConditionId(conditionElement.getAttribute("id"));
    condition.setConditionName(conditionElement.getAttribute("name"));
    // A missing or empty timeout attribute means "no timeout" (-1 seconds).
    String timeout = conditionElement.getAttribute("timeout");
    condition.setTimeoutSeconds(Long
        .valueOf(timeout != null && !timeout.equals("") ? timeout : "-1"));
    condition.setOptional(Boolean.valueOf(conditionElement
        .getAttribute("optional")));

    // Load its static configuration, if present.
    Element configElement = getFirstElement("configuration", conditionElement);
    if (configElement != null) {
      condition.setCondConfig(new WorkflowConditionConfiguration(
          getConfiguration(configElement)));
    }
    return condition;
  }

  /**
   * Reads the list of required metadata field names from a
   * {@code <requiredMetFields>} element.
   *
   * @param node The {@code requiredMetFields} element.
   * @return A {@link List} of field names, or null if no {@code metfield}
   *         children are present.
   */
  public static List getRequiredMetFields(Node node) {
    Element reqMetFieldsElem = (Element) node;
    NodeList reqMetFieldNodes = reqMetFieldsElem
        .getElementsByTagName("metfield");
    List reqFields = null;
    if (reqMetFieldNodes != null && reqMetFieldNodes.getLength() > 0) {
      reqFields = new Vector(reqMetFieldNodes.getLength());
      for (int i = 0; i < reqMetFieldNodes.getLength(); i++) {
        Element reqMetFieldElem = (Element) reqMetFieldNodes.item(i);
        reqFields.add(reqMetFieldElem.getAttribute("name"));
      }
    }
    return reqFields;
  }

  /**
   * <p>
   * Constructs a new {@link Properties} set from the given
   * {@code <configuration>} XML node's {@code property} children.
   * </p>
   *
   * @param node The XML node to construct the configuration from.
   * @return A new {@link Properties} constructed from the given XML node.
   */
  public static Properties getConfiguration(Node node) {
    Element configNode = (Element) node;
    NodeList configProperties = configNode.getElementsByTagName("property");
    if (configProperties == null) {
      return null;
    }

    Properties properties = new Properties();
    for (int i = 0; i < configProperties.getLength(); i++) {
      Element propElem = (Element) configProperties.item(i);
      String value = propElem.getAttribute("value");
      // envReplace="true" expands environment variables in the value.
      if (Boolean.parseBoolean(propElem.getAttribute("envReplace"))) {
        value = PathUtils.replaceEnvVariables(value);
      }
      properties.put(propElem.getAttribute("name"), value);
    }
    return properties;
  }

  /**
   * Reads a {@code <configuration>} node's {@code property} children into a
   * {@link Metadata} container. A {@code delim} attribute splits the value
   * into multiple metadata values; {@code envReplace="true"} triggers dynamic
   * replacement of the value first.
   *
   * @param configNode The configuration XML node.
   * @return The populated {@link Metadata}.
   * @throws Exception If dynamic replacement of a property value fails.
   */
  public static Metadata getConfigurationAsMetadata(Node configNode)
      throws Exception {
    Metadata curMetadata = new Metadata();
    NodeList curGrandChildren = configNode.getChildNodes();
    for (int k = 0; k < curGrandChildren.getLength(); k++) {
      if (curGrandChildren.item(k).getNodeName().equals("property")) {
        Element property = (Element) curGrandChildren.item(k);
        String delim = property.getAttribute("delim");
        String name = property.getAttribute("name");
        String value = property.getAttribute("value");
        if (Boolean.parseBoolean(property.getAttribute("envReplace"))) {
          value = PathUtils.doDynamicReplacement(value);
        }
        List<String> values = new Vector<String>();
        if (delim.length() > 0) {
          // The delimiter is regex-escaped since split() takes a pattern.
          values.addAll(Arrays.asList(value.split("\\" + delim)));
        } else {
          values.add(value);
        }
        curMetadata.replaceMetadata(name, values);
      }
    }
    return curMetadata;
  }

  /**
   * Resolves the {@code task} children of the given element against the map of
   * known tasks, producing ordered copies. Unknown task ids are skipped.
   */
  private static List<WorkflowTask> getTasks(Element rootNode, HashMap tasks) {
    NodeList taskList = rootNode.getElementsByTagName("task");
    List<WorkflowTask> workflowTasks = null;
    if (taskList != null && taskList.getLength() > 0) {
      workflowTasks = new Vector<WorkflowTask>(taskList.getLength());
      for (int i = 0; i < taskList.getLength(); i++) {
        Element taskElement = (Element) taskList.item(i);
        WorkflowTask t = (WorkflowTask) tasks.get(taskElement
            .getAttribute("id"));
        if (t != null) {
          // Copy the template task so callers can set per-workflow order
          // without mutating the shared instance.
          WorkflowTask workflowTask = new WorkflowTask();
          workflowTask.setTaskInstanceClassName(t.getTaskInstanceClassName());
          workflowTask.setConditions(t.getConditions());
          workflowTask.setTaskId(t.getTaskId());
          workflowTask.setTaskConfig(t.getTaskConfig());
          workflowTask.setTaskName(t.getTaskName());
          workflowTask.setOrder(i + 1);
          workflowTask.setRequiredMetFields(t.getRequiredMetFields());
          workflowTasks.add(workflowTask);
        }
      }
    }
    return workflowTasks;
  }

  /**
   * Resolves the {@code condition} children of the given element against the
   * map of known conditions, producing ordered copies. Unknown condition ids
   * are skipped.
   */
  private static List<WorkflowCondition> getConditions(Element rootNode,
      HashMap conditions) {
    List<WorkflowCondition> conditionList = new Vector<WorkflowCondition>();
    NodeList conditionNodes = rootNode.getElementsByTagName("condition");
    if (conditionNodes != null && conditionNodes.getLength() > 0) {
      conditionList = new Vector<WorkflowCondition>(conditionNodes.getLength());
      for (int i = 0; i < conditionNodes.getLength(); i++) {
        Element conditionNode = (Element) conditionNodes.item(i);
        WorkflowCondition condition = (WorkflowCondition) conditions
            .get(conditionNode.getAttribute("id"));
        if (condition != null) {
          // Copy the template condition so the shared instance keeps no
          // per-workflow order.
          WorkflowCondition workflowCondition = new WorkflowCondition();
          workflowCondition.setConditionInstanceClassName(condition
              .getConditionInstanceClassName());
          workflowCondition.setConditionId(condition.getConditionId());
          workflowCondition.setConditionName(condition.getConditionName());
          workflowCondition.setOrder(i + 1);
          workflowCondition.setTimeoutSeconds(condition.getTimeoutSeconds());
          workflowCondition.setOptional(condition.isOptional());
          workflowCondition.setCondConfig(condition.getCondConfig());
          conditionList.add(workflowCondition);
        }
      }
    }
    return conditionList;
  }

  /** Returns the first descendant element with the given tag name, or null. */
  private static Element getFirstElement(String name, Element root) {
    NodeList list = root.getElementsByTagName(name);
    // getElementsByTagName never returns null; item(0) is null when empty.
    return list != null ? (Element) list.item(0) : null;
  }

  /** Returns the element's first child's text, or null if it is not a text node. */
  private static String getSimpleElementText(Element node) {
    if (node.getChildNodes().item(0) instanceof Text) {
      return node.getChildNodes().item(0).getNodeValue();
    }
    return null;
  }

  /**
   * Returns the text content of the first {@code elemName} child of
   * {@code root}, or null when no such element exists (previously this threw a
   * NullPointerException on a missing element).
   */
  private static String getElementText(String elemName, Element root) {
    Element elem = getFirstElement(elemName, root);
    return elem != null ? getSimpleElementText(elem) : null;
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import static java.util.stream.Collectors.toList;
import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
import static org.apache.hadoop.hbase.client.ConnectionUtils.isEmptyStopRow;
import com.google.protobuf.RpcChannel;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.GetResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
/**
* The implementation of RawAsyncTable.
*/
@InterfaceAudience.Private
class RawAsyncTableImpl implements RawAsyncTable {
private final AsyncConnectionImpl conn;
private final TableName tableName;
private final int defaultScannerCaching;
private final long defaultScannerMaxResultSize;
private final long rpcTimeoutNs;
private final long readRpcTimeoutNs;
private final long writeRpcTimeoutNs;
private final long operationTimeoutNs;
private final long scanTimeoutNs;
private final long pauseNs;
private final int maxAttempts;
private final int startLogErrorsCnt;
/**
 * Builds a table client over the shared connection, copying all timing/retry
 * knobs from the builder so the instance's configuration is fixed up front.
 */
RawAsyncTableImpl(AsyncConnectionImpl conn, AsyncTableBuilderBase<?> builder) {
  this.conn = conn;
  this.tableName = builder.tableName;
  this.rpcTimeoutNs = builder.rpcTimeoutNs;
  this.readRpcTimeoutNs = builder.readRpcTimeoutNs;
  this.writeRpcTimeoutNs = builder.writeRpcTimeoutNs;
  this.operationTimeoutNs = builder.operationTimeoutNs;
  this.scanTimeoutNs = builder.scanTimeoutNs;
  this.pauseNs = builder.pauseNs;
  this.maxAttempts = builder.maxAttempts;
  this.startLogErrorsCnt = builder.startLogErrorsCnt;
  // System tables use the meta scanner caching setting; user tables use the
  // regular one.
  this.defaultScannerCaching = tableName.isSystemTable() ? conn.connConf.getMetaScannerCaching()
      : conn.connConf.getScannerCaching();
  this.defaultScannerMaxResultSize = conn.connConf.getScannerMaxResultSize();
}
/** Returns the table this async client operates on. */
@Override
public TableName getName() {
  return tableName;
}
/** Returns the configuration of the underlying connection. */
@Override
public Configuration getConfiguration() {
  return conn.getConfiguration();
}
/**
 * Converts a source value plus some contextual info (typically a region name)
 * into a destination value, e.g. a client request into a protobuf request.
 */
@FunctionalInterface
private interface Converter<D, I, S> {
  D convert(I info, S src) throws IOException;
}
/**
 * Abstraction over a single async protobuf stub invocation (get/mutate/...);
 * the response is delivered through the {@code done} callback.
 */
@FunctionalInterface
private interface RpcCall<RESP, REQ> {
  void call(ClientService.Interface stub, HBaseRpcController controller, REQ req,
      RpcCallback<RESP> done);
}
/**
 * Generic single-region RPC plumbing: converts the client request to its
 * protobuf form, fires the RPC, and converts the protobuf response back,
 * completing the returned future with the result or the failure.
 */
private static <REQ, PREQ, PRESP, RESP> CompletableFuture<RESP> call(
    HBaseRpcController controller, HRegionLocation loc, ClientService.Interface stub, REQ req,
    Converter<PREQ, byte[], REQ> reqConvert, RpcCall<PRESP, PREQ> rpcCall,
    Converter<RESP, HBaseRpcController, PRESP> respConverter) {
  CompletableFuture<RESP> future = new CompletableFuture<>();
  try {
    PREQ protoReq = reqConvert.convert(loc.getRegion().getRegionName(), req);
    rpcCall.call(stub, controller, protoReq, resp -> {
      if (controller.failed()) {
        future.completeExceptionally(controller.getFailed());
        return;
      }
      try {
        future.complete(respConverter.convert(controller, resp));
      } catch (IOException e) {
        future.completeExceptionally(e);
      }
    });
  } catch (IOException e) {
    // Request conversion failed before the RPC was even sent.
    future.completeExceptionally(e);
  }
  return future;
}
/** Specializes the generic call() plumbing to the ClientService mutate RPC. */
private static <REQ, RESP> CompletableFuture<RESP> mutate(HBaseRpcController controller,
    HRegionLocation loc, ClientService.Interface stub, REQ req,
    Converter<MutateRequest, byte[], REQ> reqConvert,
    Converter<RESP, HBaseRpcController, MutateResponse> respConverter) {
  return call(controller, loc, stub, req, reqConvert, ClientService.Interface::mutate,
      respConverter);
}
/** Fires a mutation whose response payload is irrelevant; completes with null. */
private static <REQ> CompletableFuture<Void> voidMutate(HBaseRpcController controller,
    HRegionLocation loc, ClientService.Interface stub, REQ req,
    Converter<MutateRequest, byte[], REQ> reqConvert) {
  return mutate(controller, loc, stub, req, reqConvert, (c, resp) -> null);
}
/**
 * Converts a MutateResponse to a client Result, or null when the response
 * carries no result payload. Cells may arrive via the controller's scanner.
 */
private static Result toResult(HBaseRpcController controller, MutateResponse resp)
    throws IOException {
  return resp.hasResult() ? ProtobufUtil.toResult(resp.getResult(), controller.cellScanner())
      : null;
}
/**
 * Like {@link Converter}, but also takes the (nonceGroup, nonce) pair used to
 * dedupe non-idempotent operations on retry.
 */
@FunctionalInterface
private interface NoncedConverter<D, I, S> {
  D convert(I info, S src, long nonceGroup, long nonce) throws IOException;
}
/**
 * Runs a mutate RPC for a non-idempotent operation (append/increment): a fresh
 * (nonceGroup, nonce) pair is obtained and curried into the request converter.
 */
private <REQ, RESP> CompletableFuture<RESP> noncedMutate(HBaseRpcController controller,
    HRegionLocation loc, ClientService.Interface stub, REQ req,
    NoncedConverter<MutateRequest, byte[], REQ> reqConvert,
    Converter<RESP, HBaseRpcController, MutateResponse> respConverter) {
  long nonceGroup = conn.getNonceGenerator().getNonceGroup();
  long nonce = conn.getNonceGenerator().newNonce();
  return mutate(controller, loc, stub, req,
      (info, src) -> reqConvert.convert(info, src, nonceGroup, nonce), respConverter);
}
/**
 * Pre-populates a single-request caller builder with this table's routing row
 * and retry/timeout policy; the caller supplies only the action.
 */
private <T> SingleRequestCallerBuilder<T> newCaller(byte[] row, long rpcTimeoutNs) {
  return conn.callerFactory.<T> single().table(tableName).row(row)
      .rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS)
      .operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
      .pause(pauseNs, TimeUnit.NANOSECONDS).maxAttempts(maxAttempts)
      .startLogErrorsCnt(startLogErrorsCnt);
}
/** Convenience overload: routes by the Row's row key. */
private <T> SingleRequestCallerBuilder<T> newCaller(Row row, long rpcTimeoutNs) {
  return newCaller(row.getRow(), rpcTimeoutNs);
}
/**
 * Single-row read: routes by the Get's row, converts to a protobuf GetRequest,
 * and rebuilds a client Result from the response (cells may ride the
 * controller's cell scanner). Uses the read RPC timeout.
 */
@Override
public CompletableFuture<Result> get(Get get) {
  return this.<Result> newCaller(get, readRpcTimeoutNs)
      .action((controller, loc, stub) -> RawAsyncTableImpl
          .<Get, GetRequest, GetResponse, Result> call(controller, loc, stub, get,
              RequestConverter::buildGetRequest, (s, c, req, done) -> s.get(c, req, done),
              (c, resp) -> ProtobufUtil.toResult(resp.getResult(), c.cellScanner())))
      .call();
}
/** Single-row write; completes with null on success. Uses the write RPC timeout. */
@Override
public CompletableFuture<Void> put(Put put) {
  return this.<Void> newCaller(put, writeRpcTimeoutNs)
      .action((controller, loc, stub) -> RawAsyncTableImpl.<Put> voidMutate(controller, loc, stub,
          put, RequestConverter::buildMutateRequest))
      .call();
}
/** Single-row delete; completes with null on success. Uses the write RPC timeout. */
@Override
public CompletableFuture<Void> delete(Delete delete) {
  return this.<Void> newCaller(delete, writeRpcTimeoutNs)
      .action((controller, loc, stub) -> RawAsyncTableImpl.<Delete> voidMutate(controller, loc,
          stub, delete, RequestConverter::buildMutateRequest))
      .call();
}
/**
 * Appends to a row and returns the new cell values. Append is non-idempotent,
 * so it goes through noncedMutate for retry deduplication.
 */
@Override
public CompletableFuture<Result> append(Append append) {
  // An Append without column families is a caller error; fail fast.
  checkHasFamilies(append);
  return this.<Result> newCaller(append, rpcTimeoutNs)
      .action((controller, loc, stub) -> this.<Append, Result> noncedMutate(controller, loc, stub,
          append, RequestConverter::buildMutateRequest, RawAsyncTableImpl::toResult))
      .call();
}
/**
 * Increments counters in a row and returns the new values. Increment is
 * non-idempotent, so it goes through noncedMutate for retry deduplication.
 */
@Override
public CompletableFuture<Result> increment(Increment increment) {
  // An Increment without column families is a caller error; fail fast.
  checkHasFamilies(increment);
  return this.<Result> newCaller(increment, rpcTimeoutNs)
      .action((controller, loc, stub) -> this.<Increment, Result> noncedMutate(controller, loc,
          stub, increment, RequestConverter::buildMutateRequest, RawAsyncTableImpl::toResult))
      .call();
}
/**
 * Fluent builder for checkAndMutate: (row, family) are fixed at construction,
 * then a condition is set via qualifier/ifNotExists/ifMatches, and finally one
 * of then{Put,Delete,Mutate} runs the conditional mutation. The future
 * completes with whether the mutation was applied.
 */
private final class CheckAndMutateBuilderImpl implements CheckAndMutateBuilder {

  private final byte[] row;

  private final byte[] family;

  // Condition state. qualifier may stay null (whole-family check); value is
  // null for the ifNotExists case.
  private byte[] qualifier;

  private CompareOperator op;

  private byte[] value;

  public CheckAndMutateBuilderImpl(byte[] row, byte[] family) {
    this.row = Preconditions.checkNotNull(row, "row is null");
    this.family = Preconditions.checkNotNull(family, "family is null");
  }

  @Override
  public CheckAndMutateBuilder qualifier(byte[] qualifier) {
    this.qualifier = Preconditions.checkNotNull(qualifier, "qualifier is null. Consider using" +
        " an empty byte array, or just do not call this method if you want a null qualifier");
    return this;
  }

  @Override
  public CheckAndMutateBuilder ifNotExists() {
    // "Cell does not exist" is encoded as EQUAL against a null value.
    this.op = CompareOperator.EQUAL;
    this.value = null;
    return this;
  }

  @Override
  public CheckAndMutateBuilder ifMatches(CompareOperator compareOp, byte[] value) {
    this.op = Preconditions.checkNotNull(compareOp, "compareOp is null");
    this.value = Preconditions.checkNotNull(value, "value is null");
    return this;
  }

  // Fails fast if no condition was configured before then{Put,Delete,Mutate}.
  private void preCheck() {
    Preconditions.checkNotNull(op, "condition is null. You need to specify the condition by" +
        " calling ifNotExists/ifEquals/ifMatches before executing the request");
  }

  @Override
  public CompletableFuture<Boolean> thenPut(Put put) {
    preCheck();
    // getProcessed() reports whether the condition held and the put was applied.
    return RawAsyncTableImpl.this.<Boolean> newCaller(row, rpcTimeoutNs)
        .action((controller, loc, stub) -> RawAsyncTableImpl.<Put, Boolean> mutate(controller,
          loc, stub, put,
          (rn, p) -> RequestConverter.buildMutateRequest(rn, row, family, qualifier,
            new BinaryComparator(value), CompareType.valueOf(op.name()), p),
          (c, r) -> r.getProcessed()))
        .call();
  }

  @Override
  public CompletableFuture<Boolean> thenDelete(Delete delete) {
    preCheck();
    return RawAsyncTableImpl.this.<Boolean> newCaller(row, rpcTimeoutNs)
        .action((controller, loc, stub) -> RawAsyncTableImpl.<Delete, Boolean> mutate(controller,
          loc, stub, delete,
          (rn, d) -> RequestConverter.buildMutateRequest(rn, row, family, qualifier,
            new BinaryComparator(value), CompareType.valueOf(op.name()), d),
          (c, r) -> r.getProcessed()))
        .call();
  }

  @Override
  public CompletableFuture<Boolean> thenMutate(RowMutations mutation) {
    preCheck();
    // Multi-mutation path goes through the multi RPC (see mutateRow helper).
    return RawAsyncTableImpl.this.<Boolean> newCaller(mutation, rpcTimeoutNs)
        .action((controller, loc, stub) -> RawAsyncTableImpl.<Boolean> mutateRow(controller, loc,
          stub, mutation,
          (rn, rm) -> RequestConverter.buildMutateRequest(rn, row, family, qualifier,
            new BinaryComparator(value), CompareType.valueOf(op.name()), rm),
          resp -> resp.getExists()))
        .call();
  }
}
/** Entry point for the fluent checkAndMutate builder on (row, family). */
@Override
public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
  return new CheckAndMutateBuilderImpl(row, family);
}
// We need the MultiRequest when constructing the org.apache.hadoop.hbase.client.MultiResponse,
// so here I write a new method as I do not want to change the abstraction of call method.
/**
 * Sends a RowMutations through the multi RPC against a single region and maps
 * the single-row result through {@code respConverter}. Region-level failures
 * recorded in the MultiResponse are surfaced as exceptional completion.
 */
private static <RESP> CompletableFuture<RESP> mutateRow(HBaseRpcController controller,
    HRegionLocation loc, ClientService.Interface stub, RowMutations mutation,
    Converter<MultiRequest, byte[], RowMutations> reqConvert,
    Function<Result, RESP> respConverter) {
  CompletableFuture<RESP> future = new CompletableFuture<>();
  try {
    byte[] regionName = loc.getRegion().getRegionName();
    MultiRequest req = reqConvert.convert(regionName, mutation);
    stub.multi(controller, req, new RpcCallback<MultiResponse>() {

      @Override
      public void run(MultiResponse resp) {
        if (controller.failed()) {
          future.completeExceptionally(controller.getFailed());
        } else {
          try {
            org.apache.hadoop.hbase.client.MultiResponse multiResp =
                ResponseConverter.getResults(req, resp, controller.cellScanner());
            // Check for a region-level error recorded in the multi response.
            Throwable ex = multiResp.getException(regionName);
            if (ex != null) {
              future
                  .completeExceptionally(ex instanceof IOException ? ex
                      : new IOException(
                          "Failed to mutate row: " + Bytes.toStringBinary(mutation.getRow()),
                          ex));
            } else {
              // Single-row request: the first (only) result is ours.
              future.complete(respConverter
                  .apply((Result) multiResp.getResults().get(regionName).result.get(0)));
            }
          } catch (IOException e) {
            future.completeExceptionally(e);
          }
        }
      }
    });
  } catch (IOException e) {
    // Request conversion failed before the RPC was sent.
    future.completeExceptionally(e);
  }
  return future;
}
/**
 * Atomically applies all mutations in a RowMutations to one row: the
 * RegionAction is explicitly flagged atomic before being wrapped in a
 * MultiRequest.
 */
@Override
public CompletableFuture<Void> mutateRow(RowMutations mutation) {
  return this.<Void> newCaller(mutation, writeRpcTimeoutNs).action((controller, loc,
      stub) -> RawAsyncTableImpl.<Void> mutateRow(controller, loc, stub, mutation, (rn, rm) -> {
        RegionAction.Builder regionMutationBuilder = RequestConverter.buildRegionAction(rn, rm);
        regionMutationBuilder.setAtomic(true);
        return MultiRequest.newBuilder().addRegionAction(regionMutationBuilder.build()).build();
      }, resp -> null)).call();
}
/**
 * Returns a copy of the scan with table-level defaults filled in for caching
 * and max result size. Always copies — the caller's Scan must stay untouched
 * because we may reset the start row later.
 */
private Scan setDefaultScanConfig(Scan scan) {
  Scan copy = ReflectionUtils.newInstance(scan.getClass(), scan);
  if (copy.getMaxResultSize() <= 0) {
    copy.setMaxResultSize(defaultScannerMaxResultSize);
  }
  if (copy.getCaching() <= 0) {
    copy.setCaching(defaultScannerCaching);
  }
  return copy;
}
/**
 * Runs the scan to completion and collects every Result into one list.
 * Beware: the whole result set is buffered in memory, so this is only suitable
 * for scans known to be small.
 */
@Override
public CompletableFuture<List<Result>> scanAll(Scan scan) {
  CompletableFuture<List<Result>> future = new CompletableFuture<>();
  List<Result> scanResults = new ArrayList<>();
  scan(scan, new RawScanResultConsumer() {

    @Override
    public void onNext(Result[] results, ScanController controller) {
      // Accumulate each batch as it streams in.
      scanResults.addAll(Arrays.asList(results));
    }

    @Override
    public void onError(Throwable error) {
      future.completeExceptionally(error);
    }

    @Override
    public void onComplete() {
      future.complete(scanResults);
    }
  });
  return future;
}
/**
 * Streaming scan: results are pushed to the consumer as batches arrive.
 * NOTE(review): unlike the sibling public methods this one carries no
 * @Override annotation — confirm whether RawAsyncTable declares it.
 */
public void scan(Scan scan, RawScanResultConsumer consumer) {
  new AsyncClientScanner(setDefaultScanConfig(scan), consumer, tableName, conn, pauseNs,
      maxAttempts, scanTimeoutNs, readRpcTimeoutNs, startLogErrorsCnt).start();
}
/** Batched reads: one future per Get, using the read RPC timeout. */
@Override
public List<CompletableFuture<Result>> get(List<Get> gets) {
  return batch(gets, readRpcTimeoutNs);
}
/** Batched writes: one Void future per Put. */
@Override
public List<CompletableFuture<Void>> put(List<Put> puts) {
  return voidMutate(puts);
}
/** Batched deletes: one Void future per Delete. */
@Override
public List<CompletableFuture<Void>> delete(List<Delete> deletes) {
  return voidMutate(deletes);
}
/** Mixed batch of row actions, using the generic RPC timeout. */
@Override
public <T> List<CompletableFuture<T>> batch(List<? extends Row> actions) {
  return batch(actions, rpcTimeoutNs);
}
/** Runs a write batch and erases each per-action result to Void. */
private List<CompletableFuture<Void>> voidMutate(List<? extends Row> actions) {
  return this.<Object> batch(actions, writeRpcTimeoutNs).stream()
      .map(f -> f.<Void> thenApply(r -> null)).collect(toList());
}
/**
 * Builds and fires a batch caller with this table's retry/timeout policy;
 * returns one future per action, in the same order.
 */
private <T> List<CompletableFuture<T>> batch(List<? extends Row> actions, long rpcTimeoutNs) {
  return conn.callerFactory.batch().table(tableName).actions(actions)
      .operationTimeout(operationTimeoutNs, TimeUnit.NANOSECONDS)
      .rpcTimeout(rpcTimeoutNs, TimeUnit.NANOSECONDS).pause(pauseNs, TimeUnit.NANOSECONDS)
      .maxAttempts(maxAttempts).startLogErrorsCnt(startLogErrorsCnt).call();
}
/** Generic per-RPC timeout, converted from the internal nanosecond value. */
@Override
public long getRpcTimeout(TimeUnit unit) {
  return unit.convert(rpcTimeoutNs, TimeUnit.NANOSECONDS);
}
/** Read-RPC timeout, converted from the internal nanosecond value. */
@Override
public long getReadRpcTimeout(TimeUnit unit) {
  return unit.convert(readRpcTimeoutNs, TimeUnit.NANOSECONDS);
}
/** Write-RPC timeout, converted from the internal nanosecond value. */
@Override
public long getWriteRpcTimeout(TimeUnit unit) {
  return unit.convert(writeRpcTimeoutNs, TimeUnit.NANOSECONDS);
}
/** Whole-operation (all retries) timeout, converted from nanoseconds. */
@Override
public long getOperationTimeout(TimeUnit unit) {
  return unit.convert(operationTimeoutNs, TimeUnit.NANOSECONDS);
}
/** Scan timeout, converted from the internal nanosecond value. */
@Override
public long getScanTimeout(TimeUnit unit) {
  return unit.convert(scanTimeoutNs, TimeUnit.NANOSECONDS);
}
/**
 * Invokes a coprocessor endpoint against the region owning {@code row}: builds
 * an RPC channel scoped to that region, creates the service stub over it, and
 * completes the future with the callback's response or the controller failure.
 */
private <S, R> CompletableFuture<R> coprocessorService(Function<RpcChannel, S> stubMaker,
    CoprocessorCallable<S, R> callable, RegionInfo region, byte[] row) {
  RegionCoprocessorRpcChannelImpl channel = new RegionCoprocessorRpcChannelImpl(conn, tableName,
      region, row, rpcTimeoutNs, operationTimeoutNs);
  S stub = stubMaker.apply(channel);
  CompletableFuture<R> future = new CompletableFuture<>();
  ClientCoprocessorRpcController controller = new ClientCoprocessorRpcController();
  callable.call(stub, controller, resp -> {
    if (controller.failed()) {
      future.completeExceptionally(controller.getFailed());
    } else {
      future.complete(resp);
    }
  });
  return future;
}
/** Single-region coprocessor call routed by row; region is located internally. */
@Override
public <S, R> CompletableFuture<R> coprocessorService(Function<RpcChannel, S> stubMaker,
    CoprocessorCallable<S, R> callable, byte[] row) {
  return coprocessorService(stubMaker, callable, null, row);
}
/**
 * Tells whether region location for a ranged coprocessor call can stop at this
 * region, i.e. no region after this one needs to be contacted.
 *
 * @param region the region just located
 * @param endKey the requested end row (empty means "to the end of the table")
 * @param endKeyInclusive whether endKey itself must be covered
 * @return true when no further region lookup is needed
 */
private boolean locateFinished(RegionInfo region, byte[] endKey, boolean endKeyInclusive) {
  // The last region of the table (empty end key) always finishes the locate.
  if (isEmptyStopRow(region.getEndKey())) {
    return true;
  }
  // An empty endKey means scan to the table end; this region is not the last,
  // so more regions follow.
  if (isEmptyStopRow(endKey)) {
    return false;
  }
  int c = Bytes.compareTo(endKey, region.getEndKey());
  // Finished if the region contains endKey, or endKey equals the region's end
  // key and the end key is exclusive.
  return c < 0 || (c == 0 && !endKeyInclusive);
}
/**
 * Callback invoked as each region in the requested range is located: kicks off
 * the coprocessor call on that region and, unless the range is exhausted,
 * chains the next region lookup. unfinishedRequest/locateFinished together
 * decide when every per-region call has completed so onComplete can fire once.
 *
 * NOTE(review): the {@code locs} parameter is never read in this method —
 * confirm whether it can be dropped from the (private) signature.
 */
private <S, R> void onLocateComplete(Function<RpcChannel, S> stubMaker,
    CoprocessorCallable<S, R> callable, CoprocessorCallback<R> callback,
    List<HRegionLocation> locs, byte[] endKey, boolean endKeyInclusive,
    AtomicBoolean locateFinished, AtomicInteger unfinishedRequest, HRegionLocation loc,
    Throwable error) {
  if (error != null) {
    callback.onError(error);
    return;
  }
  // Count this region's call before issuing it, so completion can't race.
  unfinishedRequest.incrementAndGet();
  RegionInfo region = loc.getRegion();
  if (locateFinished(region, endKey, endKeyInclusive)) {
    locateFinished.set(true);
  } else {
    // Locate the next region starting at this region's end key.
    conn.getLocator()
        .getRegionLocation(tableName, region.getEndKey(), RegionLocateType.CURRENT,
          operationTimeoutNs)
        .whenComplete((l, e) -> onLocateComplete(stubMaker, callable, callback, locs, endKey,
          endKeyInclusive, locateFinished, unfinishedRequest, l, e));
  }
  coprocessorService(stubMaker, callable, region, region.getStartKey()).whenComplete((r, e) -> {
    if (e != null) {
      callback.onRegionError(region, e);
    } else {
      callback.onRegionComplete(region, r);
    }
    // Fire onComplete only when the last outstanding call finishes AND the
    // locate chain has reached the end of the range.
    if (unfinishedRequest.decrementAndGet() == 0 && locateFinished.get()) {
      callback.onComplete();
    }
  });
}
/**
 * Builder for running a coprocessor service call against every region whose range
 * intersects [startKey, endKey]. By default the whole table is covered: startKey
 * defaults to the empty start row and endKey to the empty end row.
 */
private final class CoprocessorServiceBuilderImpl<S, R>
    implements CoprocessorServiceBuilder<S, R> {

  private final Function<RpcChannel, S> stubMaker;
  private final CoprocessorCallable<S, R> callable;
  private final CoprocessorCallback<R> callback;

  // Range selection state; the defaults span the entire table.
  private byte[] startKey = HConstants.EMPTY_START_ROW;
  private boolean startKeyInclusive;
  private byte[] endKey = HConstants.EMPTY_END_ROW;
  private boolean endKeyInclusive;

  public CoprocessorServiceBuilderImpl(Function<RpcChannel, S> stubMaker,
      CoprocessorCallable<S, R> callable, CoprocessorCallback<R> callback) {
    // Fail fast on null collaborators rather than at execute() time.
    this.stubMaker = Preconditions.checkNotNull(stubMaker, "stubMaker is null");
    this.callable = Preconditions.checkNotNull(callable, "callable is null");
    this.callback = Preconditions.checkNotNull(callback, "callback is null");
  }

  /** Sets the lower bound of the region range; an empty array means "table start". */
  @Override
  public CoprocessorServiceBuilderImpl<S, R> fromRow(byte[] startKey, boolean inclusive) {
    this.startKey = Preconditions.checkNotNull(startKey,
        "startKey is null. Consider using" +
            " an empty byte array, or just do not call this method if you want to start selection" +
            " from the first region");
    this.startKeyInclusive = inclusive;
    return this;
  }

  /** Sets the upper bound of the region range; an empty array means "table end". */
  @Override
  public CoprocessorServiceBuilderImpl<S, R> toRow(byte[] endKey, boolean inclusive) {
    this.endKey = Preconditions.checkNotNull(endKey,
        "endKey is null. Consider using" +
            " an empty byte array, or just do not call this method if you want to continue" +
            " selection to the last region");
    this.endKeyInclusive = inclusive;
    return this;
  }

  @Override
  public void execute() {
    // An inclusive start key is resolved with CURRENT (the region containing the key);
    // an exclusive one with AFTER (the region containing the following row).
    RegionLocateType locateType =
        startKeyInclusive ? RegionLocateType.CURRENT : RegionLocateType.AFTER;
    AtomicBoolean locateFinished = new AtomicBoolean(false);
    AtomicInteger unfinishedRequest = new AtomicInteger(0);
    conn.getLocator()
        .getRegionLocation(tableName, startKey, locateType, operationTimeoutNs)
        .whenComplete((loc, error) -> onLocateComplete(stubMaker, callable, callback,
            new ArrayList<>(), endKey, endKeyInclusive, locateFinished, unfinishedRequest,
            loc, error));
  }
}
/**
 * Entry point for range-based coprocessor execution: returns a builder that lets the
 * caller narrow the row range before {@code execute()} fans the call out per region.
 *
 * @param stubMaker creates the service stub from an {@link RpcChannel}
 * @param callable invokes the coprocessor endpoint on the stub
 * @param callback receives per-region results/errors and the overall completion signal
 */
@Override
public <S, R> CoprocessorServiceBuilder<S, R> coprocessorService(
Function<RpcChannel, S> stubMaker, CoprocessorCallable<S, R> callable,
CoprocessorCallback<R> callback) {
return new CoprocessorServiceBuilderImpl<>(stubMaker, callable, callback);
}
}
| |
/*
The author of this code is Wilco Oelen and he offers it
freely without copyright, but asks that his pages are referenced
as the source if used.
He has a webpage with information on the polynomial software
he ported and more modern versions which require jini bindings:
https://woelen.homescience.net/science/math/exps/polynomials/
https://woelen.homescience.net/science/math/exps/polynomials/software.html
The code here is from the Java port of RPoly, CPoly and MPSolve 1996 algorithms:
https://woelen.homescience.net/science/math/exps/polynomials/software/polsolve.tgz
MPSolve is an implementation of the algorithms of Bini & Fiorentino 2000,
"Design, analysis, and implementation of a multiprecision polynomial rootfinder".
...Counting, isolating and approximating all roots in a given set S are the main
goals that the algorithm provides. Automatic determination of multiplicities
and the detection of real or imaginary roots can be selected as well.
Polynomials having coefficients with a bounded precision may be processed too.
...
The algorithm is based on an adaptive strategy which automatically exploits
any specific feature of the input polynomial, like its sparsity or the
conditioning of its roots, in order to speed up the computation.
...
The resulting algorithm MPSolve, implemented in C, can deal with polynomials
having real or complex coefficients with integer, rational or floating point
real and imaginary parts with any number of digits. The algorithm can also
process polynomials with approximately known coefficients.
*/
package thirdparty.net.oelen.polsolve.pzeros;
import thirdparty.net.oelen.polarith.Complex;
import thirdparty.net.oelen.polarith.DoubleComplex;
import thirdparty.net.oelen.polarith.DoubleDouble;
// This class is the class that has public methods and can be used by
// user programs. The other classes in this package (PZerosD, PZerosDD,
// and Convex) are not intended for use by user programs, they are
// helper classes for PZeros.
// Below follows documentation of all public methods, which are provided
// by the PZeros class.
public strictfp class PZeros {

    // Hard upper bound on the number of root-refinement iterations used by solve().
    // Made final: it is a constant and nothing in this class mutates it.
    private static final int MAX_ITERATIONS = 10000;

    // Degree of the polynomial: length of the coefficient array minus 1.
    private final int degree;

    // Exactly one of the two arrays below is non-null. coefsReal is used when the
    // polynomial has purely real coefficients; coefsCplx when any coefficient has an
    // imaginary part. Both store coefficients in order of increasing power.
    private final DoubleDouble[] coefsReal;
    private final DoubleComplex[] coefsCplx;

    /**
     * Constructor that takes an array of real coefficients. Coefficients are
     * supplied in order of increasing power. Example: p(x) = A + B*x + C*x^2 +
     * D*x^3 + E*x^4 gives coefficients[] = {A, B, C, D, E}. The degree of the
     * polynomial is equal to the length of the supplied array minus 1.
     *
     * @param coef An array of real coefficients in order of increasing power.
     */
    public PZeros(double[] coef) {
        degree = coef.length - 1;
        coefsReal = new DoubleDouble[coef.length];
        for (int i = 0; i < coef.length; i++) {
            coefsReal[i] = new DoubleDouble(coef[i]);
        }
        coefsCplx = null;
    }

    /**
     * Constructor that takes an array of real coefficients, each given as a decimal
     * string (parsed by {@link DoubleDouble}). Coefficients are supplied in order of
     * increasing power. Example: p(x) = A + B*x + C*x^2 + D*x^3 + E*x^4 gives
     * coefficients[] = {A, B, C, D, E}. The degree of the polynomial is equal to the
     * length of the supplied array minus 1.
     *
     * @param coef An array of real coefficients in order of increasing power.
     */
    public PZeros(String[] coef) {
        degree = coef.length - 1;
        coefsReal = new DoubleDouble[coef.length];
        for (int i = 0; i < coef.length; i++) {
            coefsReal[i] = new DoubleDouble(coef[i]);
        }
        coefsCplx = null;
    }

    /**
     * Constructor that takes an array of real coefficients. Coefficients are
     * supplied in order of increasing power. Example: p(x) = A + B*x + C*x^2 +
     * D*x^3 + E*x^4 gives coefficients[] = {A, B, C, D, E}. The degree of the
     * polynomial is equal to the length of the supplied array minus 1.
     *
     * @param coef An array of real coefficients in order of increasing power.
     */
    public PZeros(DoubleDouble[] coef) {
        degree = coef.length - 1;
        // Shallow defensive copy; identical in effect to the original
        // element-by-element copy loop.
        coefsReal = coef.clone();
        coefsCplx = null;
    }

    /**
     * Constructor that takes an array with the real part of coefficients and
     * an array with the imaginary part of the coefficients. Coefficients are
     * supplied in order of increasing power. Example: p(x) = A + B*x + C*x^2 +
     * D*x^3 + E*x^4 gives coefficients[] = {A, B, C, D, E}.
     * The degree of the polynomial is determined by the length of the longest
     * supplied array. If the longest supplied array has N elements, then the
     * degree of the polynomial equals N-1. The shorter array is extended with
     * zero values for the higher powers. E.g. a polynomial with arrays
     * {1,2,3} and {11,22,33,44,55} has degree 4 and can be written as
     * (1+11i) + (2+22i)*x + (3+33i)*x^2 + 44i*x^3 + 55i*x^4
     *
     * @param coef_re An array containing the real part of the coefficients
     * in order of increasing power. If the supplied array equals null, then
     * the degree is determined by the length of the other array and the real
     * part of all coefficients equals 0 in that case.
     * @param coef_im An array containing the imaginary part of the coefficients
     * in order of increasing power. If the supplied array equals null, then
     * the degree is determined by the length of the other array and the imaginary
     * part of all coefficients equals 0 in that case.
     */
    public PZeros(double[] coef_re, double[] coef_im) {
        if ((coef_re == null || coef_re.length == 0) &&
            (coef_im == null || coef_im.length == 0)) {
            throw new RuntimeException("Construction of PZeros with empty coefficient set.");
        }
        if (coef_im == null || coef_im.length == 0) {
            // No imaginary parts at all: store as a purely real polynomial.
            degree = coef_re.length - 1;
            coefsReal = new DoubleDouble[coef_re.length];
            for (int i = 0; i < coef_re.length; i++) {
                coefsReal[i] = new DoubleDouble(coef_re[i]);
            }
            coefsCplx = null;
            return;
        }
        if (coef_re == null) {
            // Normalize a missing real array to an empty one so the merge below works.
            coef_re = new double[0];
        }
        int degRe = coef_re.length - 1;
        int degIm = coef_im.length - 1;
        if (degRe < degIm) {
            // Imaginary array is longer: pad the real parts with zero.
            degree = degIm;
            coefsCplx = new DoubleComplex[coef_im.length];
            int i;
            for (i = 0; i <= degRe; i++) {
                coefsCplx[i] = new DoubleComplex(coef_re[i], coef_im[i]);
            }
            for (; i < coef_im.length; i++) {
                coefsCplx[i] = new DoubleComplex(DoubleDouble.ZERO, new DoubleDouble(coef_im[i]));
            }
        }
        else {
            // Real array is at least as long: pad the imaginary parts with zero.
            degree = degRe;
            coefsCplx = new DoubleComplex[coef_re.length];
            int i;
            for (i = 0; i <= degIm; i++) {
                coefsCplx[i] = new DoubleComplex(coef_re[i], coef_im[i]);
            }
            for (; i < coef_re.length; i++) {
                coefsCplx[i] = new DoubleComplex(coef_re[i]);
            }
        }
        coefsReal = null;
    }

    /**
     * Constructor that takes an array with the real part of coefficients and
     * an array with the imaginary part of the coefficients, each element given as
     * a decimal string (parsed by {@link DoubleDouble}/{@link DoubleComplex}).
     * Coefficients are supplied in order of increasing power. Example:
     * p(x) = A + B*x + C*x^2 + D*x^3 + E*x^4 can be created with
     * coef[] = {A, B, C, D, E}.
     * The degree of the polynomial is determined by the length of the longest
     * supplied array. If the longest supplied array has N elements, then the
     * degree of the polynomial equals N-1. The shorter array is extended with
     * zero values for the higher powers. E.g. a polynomial with arrays
     * {1,2,3} and {11,22,33,44,55} has degree 4 and can be written as
     * (1+11i) + (2+22i)*x + (3+33i)*x^2 + 44i*x^3 + 55i*x^4
     *
     * @param coef_re An array containing the real part of the coefficients
     * in order of increasing power. If the supplied array equals null, then
     * the degree is determined by the length of the other array and the real
     * part of all coefficients equals 0 in that case.
     * @param coef_im An array containing the imaginary part of the coefficients
     * in order of increasing power. If the supplied array equals null, then
     * the degree is determined by the length of the other array and the imaginary
     * part of all coefficients equals 0 in that case.
     */
    public PZeros(String[] coef_re, String[] coef_im) {
        if ((coef_re == null || coef_re.length == 0) &&
            (coef_im == null || coef_im.length == 0)) {
            throw new RuntimeException("Construction of PZeros with empty coefficient set.");
        }
        if (coef_im == null || coef_im.length == 0) {
            // No imaginary parts at all: store as a purely real polynomial.
            degree = coef_re.length - 1;
            coefsReal = new DoubleDouble[coef_re.length];
            for (int i = 0; i < coef_re.length; i++) {
                coefsReal[i] = new DoubleDouble(coef_re[i]);
            }
            coefsCplx = null;
            return;
        }
        if (coef_re == null) {
            coef_re = new String[0];
        }
        int degRe = coef_re.length - 1;
        int degIm = coef_im.length - 1;
        if (degRe < degIm) {
            // Imaginary array is longer: pad the real parts with zero.
            degree = degIm;
            coefsCplx = new DoubleComplex[coef_im.length];
            int i;
            for (i = 0; i <= degRe; i++) {
                coefsCplx[i] = new DoubleComplex(coef_re[i], coef_im[i]);
            }
            for (; i < coef_im.length; i++) {
                coefsCplx[i] = new DoubleComplex(DoubleDouble.ZERO, new DoubleDouble(coef_im[i]));
            }
        }
        else {
            // Real array is at least as long: pad the imaginary parts with zero.
            degree = degRe;
            coefsCplx = new DoubleComplex[coef_re.length];
            int i;
            for (i = 0; i <= degIm; i++) {
                coefsCplx[i] = new DoubleComplex(coef_re[i], coef_im[i]);
            }
            for (; i < coef_re.length; i++) {
                coefsCplx[i] = new DoubleComplex(coef_re[i]);
            }
        }
        coefsReal = null;
    }

    /**
     * Constructor that takes an array with the real part of coefficients and
     * an array with the imaginary part of the coefficients. Coefficients are
     * supplied in order of increasing power. Example: p(x) = A + B*x + C*x^2 +
     * D*x^3 + E*x^4 can be created with coef[] = {A, B, C, D, E}.
     * The degree of the polynomial is determined by the length of the longest
     * supplied array. If the longest supplied array has N elements, then the
     * degree of the polynomial equals N-1. The shorter array is extended with
     * zero values for the higher powers. E.g. a polynomial with arrays
     * {1,2,3} and {11,22,33,44,55} has degree 4 and can be written as
     * (1+11i) + (2+22i)*x + (3+33i)*x^2 + 44i*x^3 + 55i*x^4
     *
     * @param coef_re An array containing the real part of the coefficients
     * in order of increasing power. If the supplied array equals null, then
     * the degree is determined by the length of the other array and the real
     * part of all coefficients equals 0 in that case.
     * @param coef_im An array containing the imaginary part of the coefficients
     * in order of increasing power. If the supplied array equals null, then
     * the degree is determined by the length of the other array and the imaginary
     * part of all coefficients equals 0 in that case.
     */
    public PZeros(DoubleDouble[] coef_re, DoubleDouble[] coef_im) {
        if ((coef_re == null || coef_re.length == 0) &&
            (coef_im == null || coef_im.length == 0)) {
            throw new RuntimeException("Construction of PZeros with empty coefficient set.");
        }
        if (coef_im == null || coef_im.length == 0) {
            // No imaginary parts at all: store as a purely real polynomial.
            degree = coef_re.length - 1;
            // Shallow defensive copy, same effect as the original copy loop.
            coefsReal = coef_re.clone();
            coefsCplx = null;
            return;
        }
        if (coef_re == null) {
            coef_re = new DoubleDouble[0];
        }
        int degRe = coef_re.length - 1;
        int degIm = coef_im.length - 1;
        if (degRe < degIm) {
            // Imaginary array is longer: pad the real parts with zero.
            degree = degIm;
            coefsCplx = new DoubleComplex[coef_im.length];
            int i;
            for (i = 0; i <= degRe; i++) {
                coefsCplx[i] = new DoubleComplex(coef_re[i], coef_im[i]);
            }
            for (; i < coef_im.length; i++) {
                coefsCplx[i] = new DoubleComplex(DoubleDouble.ZERO, coef_im[i]);
            }
        }
        else {
            // Real array is at least as long: pad the imaginary parts with zero.
            degree = degRe;
            coefsCplx = new DoubleComplex[coef_re.length];
            int i;
            for (i = 0; i <= degIm; i++) {
                coefsCplx[i] = new DoubleComplex(coef_re[i], coef_im[i]);
            }
            for (; i < coef_re.length; i++) {
                coefsCplx[i] = new DoubleComplex(coef_re[i]);
            }
        }
        coefsReal = null;
    }

    /**
     * Constructor that takes an array of complex coefficients. Coefficients are
     * supplied in order of increasing power. Example: p(x) = A + B*x + C*x^2 +
     * D*x^3 + E*x^4 can be created with coef[] = {A, B, C, D, E}. The degree of the
     * polynomial is equal to the length of the supplied array minus 1.
     *
     * @param coef An array of complex coefficients in order of increasing power.
     */
    public PZeros(DoubleComplex[] coef) {
        degree = coef.length - 1;
        // Shallow defensive copy; identical in effect to the original copy loop.
        coefsCplx = coef.clone();
        coefsReal = null;
    }

    /**
     * Constructor that takes an array of complex coefficients. Coefficients are
     * supplied in order of increasing power. Example: p(x) = A + B*x + C*x^2 +
     * D*x^3 + E*x^4 can be created with coef[] = {A, B, C, D, E}. The degree of the
     * polynomial is equal to the length of the supplied array minus 1.
     *
     * @param coef An array of complex coefficients in order of increasing power.
     */
    public PZeros(Complex[] coef) {
        degree = coef.length - 1;
        coefsCplx = new DoubleComplex[coef.length];
        for (int i = 0; i < coef.length; i++) {
            // Widen each 53-bit complex coefficient to DoubleDouble precision.
            coefsCplx[i] = new DoubleComplex(coef[i]);
        }
        coefsReal = null;
    }

    /**
     * Returns the degree of the polynomial.
     *
     * @return The degree of the polynomial.
     */
    public int degree() {
        return degree;
    }

    /**
     * This method computes the roots of the polynomial and stores the roots
     * in preallocated arrays, which are passed as arguments. DoubleDouble
     * 105-bit precision is used for the calculations and the results also are
     * returned at DoubleDouble precision.
     * @param root Array, in which the zeros will be stored after computation of
     * the zeros. This array must have a length of at least N elements, where N
     * is the degree of the polynomial.
     * @param radius Array, which gives an indication of the accuracy of the found
     * roots. For each root, a radius is returned. The root is assured to be in the
     * disk with the corresponding radius, centered around the returned root.
     * @param err Array, which specifies whether the root and its corresponding
     * radius of accuracy could be determined correctly. If err[i] is true, then
     * the program did not converge for root[i].
     * @return Returns the degree of the polynomial if the computation succeeds,
     * and returns a value less than the degree of the polynomial if an error
     * occurs (e.g. convergence failure). When a value less than the degree of
     * the polynomial is returned, then only part (or none) of the roots could
     * be determined. If a negative value is returned, then the supplied input
     * is not correct:
     *   -1: Leading coefficient equals 0.
     *   -2: Coefficient for x^0 (constant coefficient) equals 0.
     *   -3: Ratio of smallest coefficient magnitude and largest coefficient
     *       magnitude is too large and will lead to underflow/overflow.
     */
    public int solve(DoubleComplex[] root, double[] radius, boolean[] err) {
        int status;
        // iter is an out-parameter of the low-level solvers (iteration count);
        // it is not reported back to the caller here.
        int[] iter = new int[] {0};
        if (coefsReal != null) {
            status = pzeros(degree, coefsReal, MAX_ITERATIONS, root, radius, err, iter);
        }
        else {
            status = pzeros(degree, coefsCplx, MAX_ITERATIONS, root, radius, err, iter);
        }
        return status;
    }

    /**
     * This method computes the roots of the polynomial and stores the roots
     * in preallocated arrays, which are passed as arguments. Standard 53-bits
     * precision is used for the calculations and the results also are returned as
     * standard 53-bits precision numbers.
     * @param root Array, in which the zeros will be stored after computation of
     * the zeros. This array must have a length of at least N elements, where N
     * is the degree of the polynomial.
     * @param radius Array, which gives an indication of the accuracy of the found
     * roots. For each root, a radius is returned. The root is assured to be in the
     * disk with the corresponding radius, centered around the returned root.
     * @param err Array, which specifies whether the root and its corresponding
     * radius of accuracy could be determined correctly. If err[i] is true, then
     * the program did not converge for root[i].
     * @return Returns the degree of the polynomial if the computation succeeds,
     * and returns a value less than the degree of the polynomial if an error
     * occurs (e.g. convergence failure). When a value less than the degree of
     * the polynomial is returned, then only part (or none) of the roots could
     * be determined. If a negative value is returned, then the supplied input
     * is not correct:
     *   -1: Leading coefficient equals 0.
     *   -2: Coefficient for x^0 (constant coefficient) equals 0.
     *   -3: Ratio of smallest coefficient magnitude and largest coefficient
     *       magnitude is too large and will lead to underflow/overflow.
     */
    public int solve(Complex[] root, double[] radius, boolean[] err) {
        int status;
        int[] iter = new int[] {0};
        if (coefsReal != null) {
            // Narrow the stored DoubleDouble coefficients to plain doubles first.
            status = pzeros(degree, DoubleDouble.toDouble(coefsReal), MAX_ITERATIONS, root, radius, err, iter);
        }
        else {
            status = pzeros(degree, DoubleComplex.toComplex(coefsCplx), MAX_ITERATIONS, root, radius, err, iter);
        }
        return status;
    }

    /***********************************************************************/
    /***********************************************************************/
    /*********  Below follows the port of the original Fortran  *************/
    /*********  program. It is all private to this module!      *************/
    /*********  The code above is a wrapper for easy usage.     *************/
    /***********************************************************************/
    /***********************************************************************/
    /***********************************************************************/

    // Driver code, which uses PZerosD for normal double precision (53 bits),
    // and uses PZerosDD for DoubleDouble precision (104 bits). This code
    // contains driver methods for solving real and complex polynomial equations.
    //
    // The driver methods for DoubleDouble precision use a special strategy. They
    // first use the normal precision code and if this cannot improve the solutions
    // anymore, or cannot find all solutions, then it uses the DoubleDouble code
    // to improve the situation. This strategy makes the use of DoubleDouble code
    // approximately 3 times as fast, compared with sole use of DoubleDouble. For
    // the great majority of polynomials, this strategy assures that the initial
    // search and isolation of roots is done at normal precision, while the slower
    // high precision code only is used at the final stages of refinement.

    private static final double BIG = Double.MAX_VALUE;   // near overflow of double
    private static final double EPS_D = 1.0 / (1L << 52); // 52 bits of precision (1L: long shift)
    private static final double EPS_DD = EPS_D * EPS_D;   // 104 bits of precision

    // Driver method for polynomial with real coefficients at DoubleDouble precision.
    private static int pzeros(int n, DoubleDouble[] a, int itmax,
                              DoubleComplex[] root, double[] radius, boolean[] err, int[] iter) {
        double[] apoly = new double[n + 1];
        double[] apolyr = new double[n + 1];

        // If we have a linear equation, simply call the high precision code
        // and let it compute the zero at high precision.
        if (n == 1) {
            int status = PZerosDD.polzeros(n, a, n * EPS_DD, BIG, 1,
                                           root, radius, err, iter,
                                           true, apoly, apolyr, true);
            return status == 0 ? 1 : status;
        }

        // First call the lower precision code and prepare coefficient arrays
        // and root arrays for the lower precision numbers.
        double[] a_d = new double[a.length];
        for (int i = 0; i < a.length; i++) {
            a_d[i] = a[i].doubleValue();
        }
        Complex[] root_d = new Complex[root.length];
        // 3*itmax/5 iterations at low precision; the remainder is left for refinement.
        int status = PZerosD.polzeros(n, a_d, n * EPS_D, BIG, 3 * itmax / 5,
                                      root_d, radius, err, iter,
                                      true, apoly, apolyr, false);
        if (status != 0 && status != -4) {
            // Hard input error (-1/-2/-3); -4 (not fully converged) is acceptable here
            // because the high precision pass below continues the refinement.
            return status;
        }

        // Convert the found roots to the higher precision format and prepare
        // a boolean status vector for the high precision code. With these
        // high quality initial estimates continue the process of root finding
        // at high precision.
        for (int i = 0; i < root.length; i++) {
            root[i] = new DoubleComplex(root_d[i].real(), root_d[i].imag());
            // NOTE(review): err[i] here acts as a per-root flag handed to the high
            // precision pass (radius >= 0 appears to mark roots needing refinement);
            // semantics defined by PZerosDD.polzeros — confirm there.
            err[i] = (radius[i] >= 0.0);
        }
        status = PZerosDD.polzeros(n, a, n * EPS_DD, BIG, itmax,
                                   root, radius, err, iter,
                                   false, apoly, apolyr, true);
        return transformStatus(status, err);
    }

    // Driver method for polynomial with complex coefficients at DoubleDouble precision.
    private static int pzeros(int n, DoubleComplex[] a, int itmax,
                              DoubleComplex[] root, double[] radius, boolean[] err, int[] iter) {
        double[] apoly = new double[n + 1];
        double[] apolyr = new double[n + 1];

        // If we have a linear equation, simply call the high precision code
        // and let it compute the zero at high precision.
        if (n == 1) {
            int status = PZerosDD.polzeros(n, a, n * EPS_DD, BIG, 1,
                                           root, radius, err, iter,
                                           true, apoly, apolyr, true);
            return status == 0 ? 1 : status;
        }

        // First call the lower precision code and prepare coefficient arrays
        // and root arrays for the lower precision numbers.
        Complex[] a_d = new Complex[a.length];
        for (int i = 0; i < a.length; i++) {
            a_d[i] = new Complex(a[i].real().doubleValue(), a[i].imag().doubleValue());
        }
        Complex[] root_d = new Complex[root.length];
        int status = PZerosD.polzeros(n, a_d, n * EPS_D, BIG, 3 * itmax / 5,
                                      root_d, radius, err, iter,
                                      true, apoly, apolyr, false);
        if (status != 0 && status != -4) {
            // Hard input error; -4 (not fully converged) is handled by the
            // high precision refinement below.
            return status;
        }

        // Convert the found roots to the higher precision format and prepare
        // a boolean status vector for the high precision code. With these
        // high quality initial estimates continue the process of root finding
        // at high precision.
        for (int i = 0; i < root.length; i++) {
            root[i] = new DoubleComplex(root_d[i].real(), root_d[i].imag());
            err[i] = (radius[i] >= 0.0);
        }
        status = PZerosDD.polzeros(n, a, n * EPS_DD, BIG, itmax,
                                   root, radius, err, iter,
                                   false, apoly, apolyr, true);
        return transformStatus(status, err);
    }

    // Driver method for polynomial with real coefficients at 53 bit precision.
    private static int pzeros(int n, double[] a, int itmax,
                              Complex[] root, double[] radius, boolean[] err, int[] iter) {
        double[] apoly = new double[n + 1];
        double[] apolyr = new double[n + 1];
        int status = PZerosD.polzeros(n, a, EPS_D, BIG, itmax,
                                      root, radius, err, iter,
                                      true, apoly, apolyr, true);
        return transformStatus(status, err);
    }

    // Driver method for polynomial with complex coefficients at 53 bit precision.
    private static int pzeros(int n, Complex[] a, int itmax,
                              Complex[] root, double[] radius, boolean[] err, int[] iter) {
        double[] apoly = new double[n + 1];
        double[] apolyr = new double[n + 1];
        int status = PZerosD.polzeros(n, a, EPS_D, BIG, itmax,
                                      root, radius, err, iter,
                                      true, apoly, apolyr, true);
        return transformStatus(status, err);
    }

    // Maps the low-level solver status to the contract documented on solve():
    // for status 0 (success) or -4 (partial convergence) it returns the number of
    // entries whose err flag is false, so a fully successful run returns the degree.
    // Any other (hard error) status is passed through unchanged.
    private static int transformStatus(int status, boolean[] err) {
        if (status == 0 || status == -4) {
            status = 0;
            for (boolean e : err) {
                if (!e) status++;
            }
        }
        return status;
    }
}
| |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.spark;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.deeplearning4j.datasets.iterator.impl.IrisDataSetIterator;
import org.deeplearning4j.earlystopping.EarlyStoppingConfiguration;
import org.deeplearning4j.earlystopping.EarlyStoppingModelSaver;
import org.deeplearning4j.earlystopping.EarlyStoppingResult;
import org.deeplearning4j.earlystopping.listener.EarlyStoppingListener;
import org.deeplearning4j.earlystopping.saver.InMemoryModelSaver;
import org.deeplearning4j.earlystopping.termination.MaxEpochsTerminationCondition;
import org.deeplearning4j.earlystopping.termination.MaxScoreIterationTerminationCondition;
import org.deeplearning4j.earlystopping.termination.MaxTimeIterationTerminationCondition;
import org.deeplearning4j.earlystopping.termination.ScoreImprovementEpochTerminationCondition;
import org.deeplearning4j.earlystopping.trainer.IEarlyStoppingTrainer;
import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.ComputationGraphConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.weights.WeightInit;
import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
import org.deeplearning4j.spark.api.TrainingMaster;
import org.deeplearning4j.spark.earlystopping.SparkEarlyStoppingGraphTrainer;
import org.deeplearning4j.spark.earlystopping.SparkLossCalculatorComputationGraph;
import org.deeplearning4j.spark.impl.graph.dataset.DataSetToMultiDataSetFn;
import org.deeplearning4j.spark.impl.paramavg.ParameterAveragingTrainingMaster;
import org.junit.Test;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.learning.config.Sgd;
import org.nd4j.linalg.lossfunctions.LossFunctions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.*;
public class TestEarlyStoppingSparkCompGraph extends BaseSparkTest {
@Test
public void testEarlyStoppingIris() {
    // Happy-path run on Iris: training should stop exactly at the 5-epoch limit.
    ComputationGraphConfiguration config = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Sgd())
            .weightInit(WeightInit.XAVIER)
            .graphBuilder()
            .addInputs("in")
            .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
            .setOutputs("0")
            .build();
    ComputationGraph graph = new ComputationGraph(config);
    graph.setListeners(new ScoreIterationListener(5));

    JavaRDD<DataSet> iris = getIris();

    EarlyStoppingModelSaver<ComputationGraph> modelSaver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> earlyStopConf =
            new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                    .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                    .iterationTerminationConditions(
                            new MaxTimeIterationTerminationCondition(2, TimeUnit.MINUTES))
                    .scoreCalculator(new SparkLossCalculatorComputationGraph(
                            iris.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
                    .modelSaver(modelSaver)
                    .build();

    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(
            getContext().sc(), tm, earlyStopConf, graph, iris.map(new DataSetToMultiDataSetFn()));

    EarlyStoppingResult<ComputationGraph> result = trainer.fit();
    System.out.println(result);

    // The epoch-count condition (5) should be the terminating one.
    assertEquals(5, result.getTotalEpochs());
    assertEquals(EarlyStoppingResult.TerminationReason.EpochTerminationCondition, result.getTerminationReason());
    Map<Integer, Double> scoreVsEpoch = result.getScoreVsEpoch();
    assertEquals(5, scoreVsEpoch.size());
    String expectedDetails = earlyStopConf.getEpochTerminationConditions().get(0).toString();
    assertEquals(expectedDetails, result.getTerminationDetails());

    ComputationGraph best = result.getBestModel();
    assertNotNull(best);

    // Check that best score actually matches (returned model vs. manually calculated score)
    ComputationGraph bestNetwork = result.getBestModel();
    double manualScore = bestNetwork.score(new IrisDataSetIterator(150, 150).next());
    assertEquals(result.getBestModelScore(), manualScore, 1e-3);
}
@Test
public void testBadTuning() {
    // Test poor tuning (high LR): should terminate on MaxScoreIterationTerminationCondition
    // because the score blows up well before the (huge) epoch limit is reached.
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Sgd(2.0)) //Intentionally huge LR
            .weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
            .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3).activation(Activation.IDENTITY)
                    .lossFunction(LossFunctions.LossFunction.MSE).build(), "in")
            .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(5));

    JavaRDD<DataSet> irisData = getIris();
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
            .epochTerminationConditions(new MaxEpochsTerminationCondition(5000))
            .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(2, TimeUnit.MINUTES),
                    new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
            .scoreCalculator(new SparkLossCalculatorComputationGraph(
                    irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
            .modelSaver(saver).build();

    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(getContext().sc(), tm,
            esConf, net, irisData.map(new DataSetToMultiDataSetFn()));

    // Fixed: use the parameterized type instead of the raw EarlyStoppingResult
    // (the trainer is IEarlyStoppingTrainer<ComputationGraph>, so fit() is typed).
    EarlyStoppingResult<ComputationGraph> result = trainer.fit();

    // Divergence should cut training off long before the epoch limit.
    assertTrue(result.getTotalEpochs() < 5);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
            result.getTerminationReason());
    String expDetails = new MaxScoreIterationTerminationCondition(7.5).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
@Test
public void testTimeTermination() {
    // Test termination after max time: a tiny LR keeps the score flat, so the
    // 3-second MaxTimeIterationTerminationCondition should fire first.
    Nd4j.getRandom().setSeed(12345);
    ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(new Sgd(1e-6)).weightInit(WeightInit.XAVIER).graphBuilder()
            .addInputs("in")
            .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                    .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
            .setOutputs("0").build();
    ComputationGraph net = new ComputationGraph(conf);
    net.setListeners(new ScoreIterationListener(5));

    JavaRDD<DataSet> irisData = getIris();
    EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
    EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
            .epochTerminationConditions(new MaxEpochsTerminationCondition(10000))
            .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(3, TimeUnit.SECONDS),
                    new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
            .scoreCalculator(new SparkLossCalculatorComputationGraph(
                    irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
            .modelSaver(saver).build();

    TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
    IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(getContext().sc(), tm,
            esConf, net, irisData.map(new DataSetToMultiDataSetFn()));

    long startTime = System.currentTimeMillis();
    // Fixed: use the parameterized type instead of the raw EarlyStoppingResult.
    EarlyStoppingResult<ComputationGraph> result = trainer.fit();
    long endTime = System.currentTimeMillis();
    // Fixed: divide in long before narrowing to int, rather than casting the
    // millisecond delta first (original only worked because the delta is small).
    int durationSeconds = (int) ((endTime - startTime) / 1000);

    // Must have run for at least the 3s limit, but with generous slack for Spark overhead.
    assertTrue(durationSeconds >= 3);
    assertTrue(durationSeconds <= 20);
    assertEquals(EarlyStoppingResult.TerminationReason.IterationTerminationCondition,
            result.getTerminationReason());
    String expDetails = new MaxTimeIterationTerminationCondition(3, TimeUnit.SECONDS).toString();
    assertEquals(expDetails, result.getTerminationDetails());
}
    @Test
    public void testNoImprovementNEpochsTermination() {
        //Idea: terminate training if score (test set loss) does not improve for 5 consecutive epochs
        //Simulate this by setting LR = 0.0
        Nd4j.getRandom().setSeed(12345);
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder().seed(12345)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .updater(new Sgd(0.0)).weightInit(WeightInit.XAVIER).graphBuilder()
                .addInputs("in")
                .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                .setOutputs("0").build();
        ComputationGraph net = new ComputationGraph(conf);
        net.setListeners(new ScoreIterationListener(5));
        JavaRDD<DataSet> irisData = getIris();
        EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
        // Two epoch conditions: a generous max-epochs cap, plus the condition under test
        // (no score improvement for 5 consecutive epochs). The score-improvement condition
        // is expected to fire first because LR = 0.0 freezes the score.
        EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                .epochTerminationConditions(new MaxEpochsTerminationCondition(100),
                        new ScoreImprovementEpochTerminationCondition(5))
                .iterationTerminationConditions(new MaxScoreIterationTerminationCondition(7.5)) //Initial score is ~2.5
                .scoreCalculator(new SparkLossCalculatorComputationGraph(
                        irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
                .modelSaver(saver).build();
        TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
        IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(getContext().sc(), tm,
                esConf, net, irisData.map(new DataSetToMultiDataSetFn()));
        EarlyStoppingResult result = trainer.fit();
        //Expect no score change due to 0 LR -> terminate after 6 total epochs
        assertTrue(result.getTotalEpochs() < 12); //Normally expect 6 epochs exactly; get a little more than that here due to rounding + order of operations
        assertEquals(EarlyStoppingResult.TerminationReason.EpochTerminationCondition, result.getTerminationReason());
        String expDetails = new ScoreImprovementEpochTerminationCondition(5).toString();
        assertEquals(expDetails, result.getTerminationDetails());
    }
    @Test
    public void testListeners() {
        // Verifies that an EarlyStoppingListener receives exactly one onStart call,
        // one onEpoch call per epoch, and one onCompletion call.
        ComputationGraphConfiguration conf = new NeuralNetConfiguration.Builder()
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .updater(new Sgd()).weightInit(WeightInit.XAVIER).graphBuilder().addInputs("in")
                .addLayer("0", new OutputLayer.Builder().nIn(4).nOut(3)
                        .lossFunction(LossFunctions.LossFunction.MCXENT).build(), "in")
                .setOutputs("0").build();
        ComputationGraph net = new ComputationGraph(conf);
        net.setListeners(new ScoreIterationListener(5));
        JavaRDD<DataSet> irisData = getIris();
        EarlyStoppingModelSaver<ComputationGraph> saver = new InMemoryModelSaver<>();
        // Exactly 5 epochs are expected: the max-epochs condition terminates training,
        // and the 2-minute time limit is only a safety net.
        EarlyStoppingConfiguration<ComputationGraph> esConf = new EarlyStoppingConfiguration.Builder<ComputationGraph>()
                .epochTerminationConditions(new MaxEpochsTerminationCondition(5))
                .iterationTerminationConditions(new MaxTimeIterationTerminationCondition(2, TimeUnit.MINUTES))
                .scoreCalculator(new SparkLossCalculatorComputationGraph(
                        irisData.map(new DataSetToMultiDataSetFn()), true, sc.sc()))
                .modelSaver(saver).build();
        LoggingEarlyStoppingListener listener = new LoggingEarlyStoppingListener();
        TrainingMaster tm = new ParameterAveragingTrainingMaster(true, numExecutors(), 1, 10, 1, 0);
        IEarlyStoppingTrainer<ComputationGraph> trainer = new SparkEarlyStoppingGraphTrainer(getContext().sc(), tm,
                esConf, net, irisData.map(new DataSetToMultiDataSetFn()));
        trainer.setListener(listener);
        trainer.fit();
        assertEquals(1, listener.onStartCallCount);
        assertEquals(5, listener.onEpochCallCount);
        assertEquals(1, listener.onCompletionCallCount);
    }
private static class LoggingEarlyStoppingListener implements EarlyStoppingListener<ComputationGraph> {
private static Logger log = LoggerFactory.getLogger(LoggingEarlyStoppingListener.class);
private int onStartCallCount = 0;
private int onEpochCallCount = 0;
private int onCompletionCallCount = 0;
@Override
public void onStart(EarlyStoppingConfiguration esConfig, ComputationGraph net) {
log.info("EarlyStopping: onStart called");
onStartCallCount++;
}
@Override
public void onEpoch(int epochNum, double score, EarlyStoppingConfiguration esConfig, ComputationGraph net) {
log.info("EarlyStopping: onEpoch called (epochNum={}, score={}}", epochNum, score);
onEpochCallCount++;
}
@Override
public void onCompletion(EarlyStoppingResult esResult) {
log.info("EorlyStopping: onCompletion called (result: {})", esResult);
onCompletionCallCount++;
}
}
private JavaRDD<DataSet> getIris() {
JavaSparkContext sc = getContext();
IrisDataSetIterator iter = new IrisDataSetIterator(1, 150);
List<DataSet> list = new ArrayList<>(150);
while (iter.hasNext())
list.add(iter.next());
return sc.parallelize(list);
}
}
| |
package com.andrewofarm.msbcr;
import com.andrewofarm.msbcr.objects.geom.*;
import com.andrewofarm.msbcr.objects.programs.*;
import org.joml.Matrix4f;
import org.joml.Vector3f;
import org.joml.Vector4f;
import org.lwjgl.Version;
import org.lwjgl.glfw.*;
import org.lwjgl.opengl.GL;
import static org.lwjgl.glfw.GLFW.*;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL20.GL_VERTEX_PROGRAM_POINT_SIZE;
import static org.lwjgl.opengl.GL30.GL_FRAMEBUFFER;
import static org.lwjgl.opengl.GL30.glBindFramebuffer;
import static org.lwjgl.system.MemoryUtil.NULL;
/**
* Created by Andrew Farm on 6/7/17.
*/
@SuppressWarnings("DefaultFileTemplate")
public class HelloWorld {

    // GLFW window handle and the current logical (non-retina) window size.
    private long window;
    private int windowWidth = 800;
    private int windowHeight = 600;

    // World-scale constants: the globe has unit radius; atmosphere, sea level and
    // terrain height are expressed relative to it.
    private static final float GLOBE_RADIUS = 1.0f;
    private static final float ATMOSPHERE_WIDTH = 0.2f;
    private static final float ATMOSPHERE_CEILING = GLOBE_RADIUS + ATMOSPHERE_WIDTH;
    private static final float SEA_LEVEL = 0.5f;
    private static final float TERRAIN_SCALE = 0.75f;

    // Direction of the (sun) light, passed to the shaders each frame.
    private float lightX = -1, lightY = 0, lightZ = 0;

    // Camera state: arrow-key look offsets, orbital position (azimuth/elevation/distance),
    // and the globe's own rotation angle.
    private static final float LOOK_SPEED = 0.02f;
    private boolean up, down, left, right;
    private float camLookAzimuth = 0, camLookElev = 0;
    private float camAzimuth = 0, camElev = 0;
    private float camDist = 4;
    private float globeAzimuth = 0;
    private Vector3f camPos = new Vector3f();
    private Vector3f camPosModelSpace = new Vector3f();
    private Vector4f camPos4 = new Vector4f();
    private Vector4f camPosModelSpace4 = new Vector4f();

    private static final float FOV = (float) Math.PI / 4;
    // Precomputed 2*tan(FOV/2), used for screen-space error estimates in the adaptive globe.
    private static final float TWO_TAN_HALF_FOV = (float) (2 * Math.tan(FOV / 2));

    // Animation phases advanced every frame for the procedural noise effects.
    private float auroraNoisePhase = 0;
    private static final float AURORA_NOISE_PHASE_INCREMENT = 20f;
    private float cloudsNoisePhase = 0;
    private static final float CLOUDS_NOISE_PHASE_INCREMENT = 5f;
    private float experimentalFilterNoisePhase = 0;
    private static final float EXPERIMENTAL_FILTER_NOISE_PHASE_INCREMENT = 200f;

    // Toggle for the planetary-rings feature; ring shader/texture are only loaded when true.
    private boolean drawRings = false;

    // Mouse-drag state for orbiting the camera.
    private boolean dragging = false;
    private double prevX, prevY;

    // Simulation speed; multiplied/divided by TIME_MOD per frame while +/- is held.
    private float timePassage = 0.005f;
    private static final float TIME_MOD = 1.1f;
    private boolean speedUp, slowDown;
    // When true the camera azimuth advances with the globe, keeping the same face in view.
    private boolean geostationary = true;

    private Matrix4f modelMatrix = new Matrix4f();
    private Matrix4f inverseModelMatrix = new Matrix4f();

    // Aurora placement: offset of the geomagnetic pole from the rotation axis (radians)
    // and the radial band the aurora geometry occupies.
    private static final float GEOMAGNETIC_POLE_LATITUDE = 0.3033f;
    private static final float GEOMAGNETIC_POLE_LONGITUDE = 0.1681f;
    private static final float AURORA_POLAR_ANGLE = 0.3f;
    private static final float AURORA_LOWER_BOUND = GLOBE_RADIUS + 0.01f;
    private static final float AURORA_UPPER_BOUND = GLOBE_RADIUS + 0.075f;
    private Matrix4f auroraModelMatrix = new Matrix4f();

    private Matrix4f viewMatrix = new Matrix4f();
    private Matrix4f projectionMatrix = new Matrix4f();
    private Matrix4f mvpMatrix = new Matrix4f();
    // View-projection with the translation zeroed; used for skybox/sun which follow the camera.
    private Matrix4f vpRotationMatrix = new Matrix4f();

    // Light-space matrices for shadow mapping.
    private Matrix4f lightViewMatrix = new Matrix4f().lookAt(
            new Vector3f(lightX, lightY, lightZ).normalize().mul(2),
            new Vector3f(0, 0, 0),
            new Vector3f(0, 1, 0));
    private Matrix4f lightProjectionMatrix = new Matrix4f().ortho(-2, 2, -2, 2, 0, 6);
    // Maps clip space [-1, 1] to texture space [0, 1] for shadow-map lookups.
    private Matrix4f lightBiasMatrix = new Matrix4f(
            0.5f, 0.0f, 0.0f, 0.0f,
            0.0f, 0.5f, 0.0f, 0.0f,
            0.0f, 0.0f, 0.5f, 0.0f,
            0.5f, 0.5f, 0.5f, 1.0f);
    private Matrix4f lightMvpMatrix = new Matrix4f();
    private Matrix4f lightBiasMvpMatrix = new Matrix4f();

    // Sphere tessellation resolution for the ocean and cloud meshes.
    private static final int MERIDIANS = 512;
    private static final int PARALLELS = 256;

    // Scene geometry (constructed eagerly; GL buffers presumably created lazily
    // by the geometry classes — TODO confirm, since no GL context exists yet here).
    private SkyboxGeometry skybox = new SkyboxGeometry();
    private SunGeometry sun = new SunGeometry(lightX, lightY, lightZ);
    private AdaptiveGlobeGeometry globe = new AdaptiveGlobeGeometry(1.0f, 64);
    private RingsGeometry rings = new RingsGeometry(128, 1.5f, 3.0f);
    private OceanGeometry ocean = new OceanGeometry(1.0f, MERIDIANS, PARALLELS);
    private OceanGeometry cloudLayer1 = new OceanGeometry(GLOBE_RADIUS + 0.02f, MERIDIANS, PARALLELS);
    private AtmosphereCeilingGeometry atmCeiling = new AtmosphereCeilingGeometry(ATMOSPHERE_CEILING, 64, 32);
    private AuroraGeometry aurora = new AuroraGeometry(512, AURORA_LOWER_BOUND, AURORA_UPPER_BOUND);

    // Shader programs; created in initScene() once the GL context exists.
    private AdaptiveGlobeShadowMapShaderProgram shadowMapShaderProgram;
    private SkyboxShaderProgram skyboxShaderProgram;
    private SunShaderProgram sunShaderProgram;
    private AdaptiveGlobeShaderProgram adaptiveGlobeShaderProgram;
    private RingsShaderProgram ringsShaderProgram;
    private OceanShaderProgram oceanShaderProgram;
    private CloudShaderProgram cloudShaderProgram;
    private AtmosphereCeilingShaderProgram atmosphereCeilingShaderProgram;
    private AuroraShaderProgram auroraShaderProgram;

    // Full-screen post-processing pass; screenShaderProgram points at whichever
    // of the three filters is active (switched with keys 1/2/3).
    private ScreenGeometry screenGeometry = new ScreenGeometry();
    private ScreenShaderProgram screenShaderProgram;
    private SimpleScreenShaderProgram simpleScreenShaderProgram;
    private HDRScreenShaderProgram hdrScreenShaderProgram;
    private ExperimentalScreenShaderProgram experimentalScreenShaderProgram;

    // Texture handles loaded in initScene().
    private int starfieldTexture;
    private int sunTexture;
    private int globeTexture;
    private int displacementMap;
    private int normalMap;
    private int ringsTexture;
    private int auroraTexture;

    // Shadow-map and offscreen-scene framebuffers.
    private int shadowMapWidth = 4096;
    private int shadowMapHeight = 4096;
    private int shadowMapFramebuffer;
    private int shadowMapDepthTexture;
    private int screenFramebuffer;
    private int screenTexture;

    /** Application entry sequence: create the window, set up GL resources, run the render loop. */
    private void run() {
        System.out.println("Hello LWJGL " + Version.getVersion() + "!");
        initWindow();
        initScene();
        loop();
    }

    /**
     * Initializes GLFW, creates the window, registers all input callbacks,
     * centers the window and makes its GL context current.
     */
    private void initWindow() {
        // Setup an error callback. The default implementation
        // will print the error message in System.err.
        GLFWErrorCallback.createPrint(System.err).set();
        // Initialize GLFW. Most GLFW functions will not work before doing this.
        boolean success = glfwInit();
        if (!success) {
            throw new IllegalStateException("unable to initialize GLFW");
        }
        glfwWindowHint(GLFW_FOCUSED, GLFW_TRUE);
        // Create the window
        window = glfwCreateWindow(windowWidth, windowHeight, "Hello World!", NULL, NULL);
        if (window == NULL) {
            throw new RuntimeException("failed to create GLFW window");
        }
        // Setup a key callback. It will be called every time a key is pressed, repeated or released.
        // Arrow keys steer the look direction, +/- change simulation speed,
        // ESC toggles geostationary mode, 1/2/3 select the post-processing filter.
        glfwSetKeyCallback(window, (window, key, scancode, action, mods) -> {
            if (action == GLFW_PRESS) {
                switch (key) {
                    case GLFW_KEY_UP:
                        up = true;
                        break;
                    case GLFW_KEY_DOWN:
                        down = true;
                        break;
                    case GLFW_KEY_LEFT:
                        left = true;
                        break;
                    case GLFW_KEY_RIGHT:
                        right = true;
                        break;
                    case GLFW_KEY_EQUAL:
                        speedUp = true;
                        break;
                    case GLFW_KEY_MINUS:
                        slowDown = true;
                        break;
                    case GLFW_KEY_ESCAPE:
                        geostationary = !geostationary;
                        break;
                    case GLFW_KEY_1:
                        screenShaderProgram = simpleScreenShaderProgram;
                        break;
                    case GLFW_KEY_2:
                        screenShaderProgram = hdrScreenShaderProgram;
                        break;
                    case GLFW_KEY_3:
                        screenShaderProgram = experimentalScreenShaderProgram;
                        break;
                }
            } else if (action == GLFW_RELEASE) {
                switch (key) {
                    case GLFW_KEY_UP:
                        up = false;
                        break;
                    case GLFW_KEY_DOWN:
                        down = false;
                        break;
                    case GLFW_KEY_LEFT:
                        left = false;
                        break;
                    case GLFW_KEY_RIGHT:
                        right = false;
                        break;
                    case GLFW_KEY_EQUAL:
                        speedUp = false;
                        break;
                    case GLFW_KEY_MINUS:
                        slowDown = false;
                        break;
                }
            }
        });
        // Re-render immediately on resize so the scene tracks the new aspect ratio;
        // the GL.getCapabilities() null check skips rendering before initScene() has run.
        glfwSetWindowSizeCallback(window, (window1, width, height) -> {
            windowWidth = width;
            windowHeight = height;
            if (GL.getCapabilities() != null) {
                updateProjectionMatrix(width, height);
                render();
            }
        });
        // Start/stop dragging and remember the cursor position the drag started from.
        glfwSetMouseButtonCallback(window, (long window, int button, int action, int mods) -> {
            dragging = (action == GLFW_PRESS);
            double[] cursorX = new double[1];
            double[] cursorY = new double[1];
            glfwGetCursorPos(window, cursorX, cursorY);
            prevX = cursorX[0];
            prevY = cursorY[0];
        });
        // Orbit the camera while dragging; sensitivity scales with distance from the surface
        // so close-up control stays fine-grained.
        glfwSetCursorPosCallback(window, (long window, double xpos, double ypos) -> {
            if (dragging) {
                float scale = camDist - GLOBE_RADIUS;
                camAzimuth -= (xpos - prevX) * scale * 0.0015f;
                camElev += (ypos - prevY) * scale * 0.0015f;
                camElev = Math.min(Math.max(camElev, (float) -Math.PI / 2), (float) Math.PI / 2);
                updateViewMatrix();
                prevX = xpos;
                prevY = ypos;
            }
        });
        // Scroll zooms; zoom speed also scales with distance from the surface, and the
        // look elevation tilts toward the horizon as the camera approaches the globe.
        glfwSetScrollCallback(window, (long window, double xoffset, double yoffset) -> {
            camDist -= yoffset * (camDist - GLOBE_RADIUS) * 0.005;
            camDist = Math.min(Math.max(camDist, 1.005f), 15);
            camLookElev = (float) (1.6 * Math.pow(2, -4 * (camDist - GLOBE_RADIUS)));
            updateViewMatrix();
        });
        int[] pWidth = new int[1];
        int[] pHeight = new int[1];
        // Get the window size passed to glfwCreateWindow
        glfwGetWindowSize(window, pWidth, pHeight);
        updateProjectionMatrix(pWidth[0], pHeight[0]);
        // Get the resolution of the primary monitor
        GLFWVidMode vidmode = glfwGetVideoMode(glfwGetPrimaryMonitor());
        // Center the window
        glfwSetWindowPos(window,
                (vidmode.width() - pWidth[0]) / 2,
                (vidmode.height() - pHeight[0]) / 2);
        // Make the OpenGL context current
        glfwMakeContextCurrent(window);
        // Enable v-sync
        glfwSwapInterval(1);
        // Make the window visible
        glfwShowWindow(window);
    }

    /**
     * Creates the GL capabilities, compiles all shader programs, loads textures,
     * and allocates the shadow-map and offscreen-scene framebuffers.
     */
    private void initScene() {
        // This line is critical for LWJGL's interoperation with GLFW's
        // OpenGL context, or any context that is managed externally.
        // LWJGL detects the context that is current in the current thread,
        // creates the GLCapabilities instance and makes the OpenGL
        // bindings available for use.
        GL.createCapabilities();
        shadowMapShaderProgram = new AdaptiveGlobeShadowMapShaderProgram();
        skyboxShaderProgram = new SkyboxShaderProgram();
        sunShaderProgram = new SunShaderProgram();
        adaptiveGlobeShaderProgram = new AdaptiveGlobeShaderProgram();
        if (drawRings) ringsShaderProgram = new RingsShaderProgram();
        oceanShaderProgram = new OceanShaderProgram();
        cloudShaderProgram = new CloudShaderProgram();
        atmosphereCeilingShaderProgram = new AtmosphereCeilingShaderProgram();
        auroraShaderProgram = new AuroraShaderProgram();
        simpleScreenShaderProgram = new SimpleScreenShaderProgram();
        hdrScreenShaderProgram = new HDRScreenShaderProgram();
        experimentalScreenShaderProgram = new ExperimentalScreenShaderProgram();
        // HDR tone mapping is the default post-processing filter.
        screenShaderProgram = hdrScreenShaderProgram;
        globeTexture = TextureLoader.loadTexture2D("res/earth-nasa.jpg");
        displacementMap = TextureLoader.loadTexture2D("res/elevation.png");
        normalMap = TextureLoader.loadTexture2D("res/normalmap.png");
        sunTexture = TextureLoader.loadTexture2D("res/sun.jpg");
        starfieldTexture = TextureLoader.loadTextureCube(new String[] {
                "res/starmap_8k_4.png",
                "res/starmap_8k_3.png",
                "res/starmap_8k_6.png",
                "res/starmap_8k_5.png",
                "res/starmap_8k_2.png",
                "res/starmap_8k_1.png"
        });
        if (drawRings) ringsTexture = TextureLoader.loadTexture1D("res/rings.jpg");
        auroraTexture = TextureLoader.loadTexture1D("res/aurora.png");
        TextureLoader.TextureFramebuffer shadowMap = TextureLoader.createDepthTextureFrameBuffer(
                shadowMapWidth, shadowMapHeight, GL_FLOAT, GL_LINEAR);
        if (shadowMap != null) {
            shadowMapFramebuffer = shadowMap.frameBufferID;
            shadowMapDepthTexture = shadowMap.textureID;
        }
        // Offscreen color buffer at 2x window size; presumably sized for retina/HiDPI
        // displays — see the TODO in renderScene().
        TextureLoader.TextureFramebuffer screenBuffer = TextureLoader.createColorTextureFrameBuffer(
                windowWidth * 2, windowHeight * 2, GL_FLOAT, GL_NEAREST);
        if (screenBuffer != null) {
            screenFramebuffer = screenBuffer.frameBufferID;
            screenTexture = screenBuffer.textureID;
        }
        modelMatrix.identity();
        viewMatrix.setTranslation(0, 0, -camDist).rotate(camAzimuth, 0, 1, 0).rotate(camElev, 1, 0, 0);
    }

    /** Main loop: update simulation state, render, then poll input (~50 fps cap via sleep). */
    private void loop() {
        // Run the rendering loop until the user has attempted to close
        // the window or has pressed the ESCAPE key.
        while (!glfwWindowShouldClose(window)) {
            update();
            render();
            try {
                Thread.sleep(20);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            glfwPollEvents();
        }
    }

    /** Advances one simulation step: input-driven state, noise phases, rotation, matrices, LOD. */
    private void update() {
        if (up) {
            camLookElev += LOOK_SPEED;
        }
        if (down) {
            camLookElev -= LOOK_SPEED;
        }
        if (left) {
            camLookAzimuth += LOOK_SPEED;
        }
        if (right) {
            camLookAzimuth -= LOOK_SPEED;
        }
        if (speedUp) {
            timePassage *= TIME_MOD;
        }
        if (slowDown) {
            timePassage /= TIME_MOD;
        }
        auroraNoisePhase += AURORA_NOISE_PHASE_INCREMENT * timePassage;
        cloudsNoisePhase += CLOUDS_NOISE_PHASE_INCREMENT * timePassage;
        // NOTE(review): unlike the other phases this one is not scaled by timePassage —
        // confirm whether the screen-filter noise is meant to ignore simulation speed.
        experimentalFilterNoisePhase += EXPERIMENTAL_FILTER_NOISE_PHASE_INCREMENT;
        globeAzimuth += timePassage;
        updateModelMatrix();
        if (geostationary) {
            // Rotate the camera with the globe so the same surface region stays in view.
            camAzimuth += timePassage;
            updateViewMatrix();
        }
        updateMvpMatrix(modelMatrix);
        updateLightMatrices();
        updateVpRotationMatrix();
        updateCamPos();
        // Refine/coarsen the adaptive globe mesh based on the camera position.
        globe.update(camPosModelSpace, TWO_TAN_HALF_FOV);
    }

    /** Renders the scene to the offscreen buffer, then blits it through the active screen filter. */
    private void render() {
        renderScene();
        // Back to the default framebuffer for the full-screen post-processing pass.
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
        glViewport(0, 0, windowWidth * 2, windowHeight * 2);
        glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        glDisable(GL_CULL_FACE);
        glDisable(GL_DEPTH_TEST);
        glDisable(GL_BLEND);
        glClear(GL_COLOR_BUFFER_BIT);
        screenShaderProgram.useProgram();
        screenShaderProgram.setTexture(screenTexture);
        if (screenShaderProgram instanceof ExperimentalScreenShaderProgram) {
            ((ExperimentalScreenShaderProgram) screenShaderProgram).setNoisePhase(experimentalFilterNoisePhase);
        }
        screenGeometry.draw(screenShaderProgram);
        glfwSwapBuffers(window); // swap the color buffers
    }

    /**
     * Renders the 3D scene into the offscreen framebuffer:
     * shadow-map pass, then skybox, sun, globe, (rings), ocean, atmosphere,
     * clouds and aurora — in that order, with the blend/cull/depth state each pass needs.
     */
    private void renderScene() {
        glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        glEnable(GL_DEPTH_TEST);
        glDepthFunc(GL_LEQUAL);
        glEnable(GL_CULL_FACE);
        glEnable(GL_BLEND);
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
        glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);
        //render to shadow map
        glBindFramebuffer(GL_FRAMEBUFFER, shadowMapFramebuffer);
        glClear(GL_DEPTH_BUFFER_BIT);
        glViewport(0, 0, shadowMapWidth, shadowMapHeight);
        shadowMapShaderProgram.useProgram();
        shadowMapShaderProgram.setLightMvpMatrix(lightMvpMatrix);
        shadowMapShaderProgram.setDisplacementMap(displacementMap);
        shadowMapShaderProgram.setSeaLevel(SEA_LEVEL);
        shadowMapShaderProgram.setTerrainScale(TERRAIN_SCALE);
        globe.draw(shadowMapShaderProgram, camPosModelSpace, TWO_TAN_HALF_FOV);
        glBindFramebuffer(GL_FRAMEBUFFER, screenFramebuffer);
        glViewport(0, 0, windowWidth * 2, windowHeight * 2); //TODO check for retina display
        //draw starfield
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        skyboxShaderProgram.useProgram();
        // NOTE(review): mul(..., vpRotationMatrix) stores the product back into
        // vpRotationMatrix (the dest argument). This appears intentional since
        // updateVpRotationMatrix() recomputes it from viewMatrix every frame,
        // but confirm no later pass expects the pre-multiplied value.
        skyboxShaderProgram.setVpMatrix(projectionMatrix.mul(vpRotationMatrix, vpRotationMatrix));
        skyboxShaderProgram.setTexture(starfieldTexture);
        skybox.draw(skyboxShaderProgram);
        //draw sun
        sunShaderProgram.useProgram();
        sunShaderProgram.setVpMatrix(vpRotationMatrix);
        sunShaderProgram.setSize(500);
        sunShaderProgram.setColor(new Vector3f(1.0f, 0.95f, 0.9f));
        sunShaderProgram.setTexture(sunTexture);
        sun.draw(sunShaderProgram);
        //draw globe
        adaptiveGlobeShaderProgram.useProgram();
        adaptiveGlobeShaderProgram.setMvpMatrix(mvpMatrix);
        adaptiveGlobeShaderProgram.setModelMatrix(modelMatrix);
        adaptiveGlobeShaderProgram.setLightBiasMvpMatrix(lightBiasMvpMatrix);
        adaptiveGlobeShaderProgram.setLightDirection(lightX, lightY, lightZ);
        adaptiveGlobeShaderProgram.setDisplacementMap(displacementMap);
        adaptiveGlobeShaderProgram.setTexture(globeTexture);
        adaptiveGlobeShaderProgram.setNormalMap(normalMap);
        adaptiveGlobeShaderProgram.setShadowMap(shadowMapDepthTexture);
        adaptiveGlobeShaderProgram.setSeaLevel(SEA_LEVEL);
        adaptiveGlobeShaderProgram.setTerrainScale(TERRAIN_SCALE);
        adaptiveGlobeShaderProgram.setCamPos(camPos.get(0), camPos.get(1), camPos.get(2));
        adaptiveGlobeShaderProgram.setGlobeRadius(GLOBE_RADIUS);
        adaptiveGlobeShaderProgram.setAtmosphereWidth(ATMOSPHERE_WIDTH);
        globe.draw(adaptiveGlobeShaderProgram, camPosModelSpace, TWO_TAN_HALF_FOV);
        //draw rings
        if (drawRings) {
            // Rings are a flat disc visible from both sides, so culling is disabled.
            glDisable(GL_CULL_FACE);
            ringsShaderProgram.useProgram();
            ringsShaderProgram.setMvpMatrix(mvpMatrix);
            ringsShaderProgram.setLightBiasMvpMatrixMatrix(lightBiasMvpMatrix);
            ringsShaderProgram.setTexture(ringsTexture);
            ringsShaderProgram.setShadowMap(shadowMapDepthTexture);
            rings.draw(ringsShaderProgram);
        }
        //draw ocean
        glEnable(GL_CULL_FACE);
        oceanShaderProgram.useProgram();
        oceanShaderProgram.setMvpMatrix(mvpMatrix);
        oceanShaderProgram.setModelMatrix(modelMatrix);
        oceanShaderProgram.setLightDirection(lightX, lightY, lightZ);
        oceanShaderProgram.setCamPos(camPos.get(0), camPos.get(1), camPos.get(2));
        oceanShaderProgram.setGlobeRadius(GLOBE_RADIUS);
        oceanShaderProgram.setElevationMap(displacementMap);
        oceanShaderProgram.setSeaLevel(SEA_LEVEL);
        oceanShaderProgram.setAtmosphereWidth(ATMOSPHERE_WIDTH);
        ocean.draw(oceanShaderProgram);
        //draw atmosphere ceiling
        // Front faces are culled so only the inside of the atmosphere shell is drawn;
        // additive blending (GL_ONE, GL_ONE) accumulates the scattered light.
        glEnable(GL_CULL_FACE);
        glCullFace(GL_FRONT);
        glBlendFunc(GL_ONE, GL_ONE);
        atmosphereCeilingShaderProgram.useProgram();
        atmosphereCeilingShaderProgram.setMvpMatrix(mvpMatrix);
        atmosphereCeilingShaderProgram.setModelMatrix(modelMatrix);
        atmosphereCeilingShaderProgram.setLightDirection(lightX, lightY, lightZ);
        atmosphereCeilingShaderProgram.setCamPos(camPos.get(0), camPos.get(1), camPos.get(2));
        atmosphereCeilingShaderProgram.setGlobeRadius(GLOBE_RADIUS);
        atmosphereCeilingShaderProgram.setAtmosphereWidth(ATMOSPHERE_WIDTH);
        atmCeiling.draw(atmosphereCeilingShaderProgram);
        //draw cloud layers
        glDisable(GL_CULL_FACE);
        glDepthMask(false); //perform depth tests, but don't write to the depth buffer
        glEnable(GL_BLEND);
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
        cloudShaderProgram.useProgram();
        cloudShaderProgram.setMvpMatrix(mvpMatrix);
        cloudShaderProgram.setModelMatrix(modelMatrix);
        cloudShaderProgram.setLightBiasMvpMatrix(lightBiasMvpMatrix);
        cloudShaderProgram.setCloudCoverUnit(0); //TODO
        cloudShaderProgram.setShadowMapUnit(shadowMapDepthTexture);
        cloudShaderProgram.setLightDirection(lightX, lightY, lightZ);
        cloudShaderProgram.setCamPos(camPos.get(0), camPos.get(1), camPos.get(2));
        cloudShaderProgram.setGlobeRadius(GLOBE_RADIUS);
        cloudShaderProgram.setAtmosphereWidth(ATMOSPHERE_WIDTH);
        cloudShaderProgram.setNoisePhase(cloudsNoisePhase);
        cloudLayer1.draw(cloudShaderProgram);
        glDepthMask(true);
        //draw aurora
        glDisable(GL_CULL_FACE);
        glDepthMask(false); //perform depth tests, but don't write to the depth buffer
        glBlendFunc(GL_ONE, GL_ONE);
        // The aurora uses its own model matrix (tilted to the geomagnetic pole);
        // drawn twice, once per hemisphere (polar angle offset by PI).
        updateMvpMatrix(auroraModelMatrix);
        auroraShaderProgram.useProgram();
        auroraShaderProgram.setMvpMatrix(mvpMatrix);
        auroraShaderProgram.setTexture(auroraTexture);
        auroraShaderProgram.setNoisePhase(auroraNoisePhase);
        auroraShaderProgram.setPolarAngle(AURORA_POLAR_ANGLE);
        aurora.draw(auroraShaderProgram);
        auroraShaderProgram.setPolarAngle((float) Math.PI + AURORA_POLAR_ANGLE);
        aurora.draw(auroraShaderProgram);
        // Restore default state for the next frame.
        glEnable(GL_CULL_FACE);
        glCullFace(GL_BACK);
        glDepthMask(true);
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    }

    /** Rebuilds the perspective projection for the given window aspect ratio. */
    private void updateProjectionMatrix(int width, int height) {
        projectionMatrix.identity();
        projectionMatrix.perspective(FOV, (float) width / (float) height, 0.001f, 20f);
    }

    /** Rebuilds the view matrix from look offsets, orbit angles and camera distance. */
    private void updateViewMatrix() {
        viewMatrix.identity();
        viewMatrix
                .rotate(-camLookElev, 1, 0, 0)
                .rotate(-camLookAzimuth, 0, 1, 0)
                .translate(0, 0, -camDist)
                .rotate(camElev, 1, 0, 0)
                .rotate(-camAzimuth, 0, 1, 0);
    }

    /**
     * Rebuilds the globe model matrix (rotation about Y), its inverse (for
     * transforming the camera into model space), and the aurora's tilted variant.
     */
    private void updateModelMatrix() {
        modelMatrix.identity();
        modelMatrix.rotate(globeAzimuth, 0, 1, 0);
        modelMatrix.invert(inverseModelMatrix);
        auroraModelMatrix.set(modelMatrix)
                .rotate(-GEOMAGNETIC_POLE_LATITUDE, 1, 0, 0)
                .rotate(GEOMAGNETIC_POLE_LONGITUDE, 0, 1, 0);
    }

    /** Recomputes the light-space MVP and its bias-adjusted version for shadow lookups. */
    private void updateLightMatrices() {
        lightMvpMatrix.set(lightProjectionMatrix).mul(lightViewMatrix).mul(modelMatrix);
        lightBiasMvpMatrix.set(lightBiasMatrix).mul(lightMvpMatrix);
    }

    /** Combines projection * view * the given model matrix into mvpMatrix. */
    private void updateMvpMatrix(Matrix4f modelMatrix) {
        mvpMatrix.set(projectionMatrix).mul(viewMatrix).mul(modelMatrix);
    }

    /** Copies the view matrix with its translation zeroed (rotation-only VP for skybox/sun). */
    private void updateVpRotationMatrix() {
        vpRotationMatrix.set(viewMatrix);
        vpRotationMatrix.m30(0);
        vpRotationMatrix.m31(0);
        vpRotationMatrix.m32(0);
    }

    /** Derives the camera's world position from its spherical coordinates, and its model-space position via the inverse model matrix. */
    private void updateCamPos() {
        camPos.set(
                (float) (camDist * Math.sin(camAzimuth) * Math.cos(camElev)),
                (float) (camDist * Math.sin(camElev)),
                (float) (camDist * Math.cos(camAzimuth) * Math.cos(camElev)));
        camPos4.set(camPos, 1.0f);
        inverseModelMatrix.transform(camPos4, camPosModelSpace4);
        camPosModelSpace.set(
                camPosModelSpace4.get(0),
                camPosModelSpace4.get(1),
                camPosModelSpace4.get(2));
    }

    public static void main(String[] args) {
        new HelloWorld().run();
    }
}
| |
package com.toddfast.mutagen.cassandra;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.file.Paths;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datastax.driver.core.Session;
import com.toddfast.mutagen.Mutation;
import com.toddfast.mutagen.State;
import com.toddfast.mutagen.basic.SimpleState;
import com.toddfast.mutagen.cassandra.utils.DBUtils;
/**
* Base class for cassandra mutation.
* An {@link Mutation} implementation for cassandra.
* Represents a single change that can be made to a resource,identified
* unambiguously by a state.
*
*/
public abstract class AbstractCassandraMutation implements Mutation<String> {

    // //////////////////////////////////////////////////////////////////////////
    // Fields
    // //////////////////////////////////////////////////////////////////////////

    private final static String VERSION_PATTERN = "M(\\d{12})_.*";

    // Compiled once and reused: Pattern compilation is relatively expensive and the
    // compiled Pattern is immutable and thread-safe, so there is no need to
    // recompile it on every parseVersion() call.
    private static final Pattern VERSION_REGEX = Pattern.compile(VERSION_PATTERN);

    private static final Logger LOGGER = LoggerFactory.getLogger(AbstractCassandraMutation.class);

    private Session session; // session used to execute CQL statements

    private State<String> version; // lazily parsed from the resource name

    private boolean ignoreDB; // when true, skip recording the version in the database

    /**
     * Constructor for AbstractCassandraMutation.
     *
     * @param session
     *            the session to execute cql statement
     */
    public AbstractCassandraMutation(Session session) {
        setSession(session);
        version = null;
    }

    /**
     * Get the string of mutation state.
     *
     * @return string representing the mutation state.
     */
    @Override
    public String toString() {
        return getResourceName() + "[state=" + getResultingState().getID() + "]";
    }

    /**
     * Returns the state of a resource.
     * The state represents the datetime of the resource with the name convention:<br>
     * M&lt;DATETIME&gt;_&lt;Camel case title&gt;_&lt;ISSUE&gt;.cqlsh.txt<br>
     * M&lt;DATETIME&gt;_&lt;Camel case title&gt;_&lt;ISSUE&gt;.java<br>
     *
     * @param resourceName
     *            the name of resource.
     * @return
     *         the state of a resource.
     * @throws IllegalArgumentException
     *             if the resource file name does not match the version pattern.
     */
    protected final State<String> parseVersion(String resourceName) {
        LOGGER.trace("Entering parseVersion(resourceName={})", resourceName);
        String filename = Paths.get(resourceName).getFileName().toString();
        Matcher matcher = VERSION_REGEX.matcher(filename);
        if (!matcher.matches()) {
            throw new IllegalArgumentException("Resource name [" + filename + "] does not match with pattern [" + VERSION_PATTERN + "] for extracting version");
        }
        String version = matcher.group(1);
        LOGGER.trace("Leaving parseVersion() : {}", version);
        return new SimpleState<>(version);
    }

    /**
     * Override to perform the actual mutation.
     *
     * @param context
     *            Logs to {@link System#out} and {@link System#err}
     */
    protected abstract void performMutation(Context context);

    /**
     * Get the state after mutation.
     *
     * @return state
     */
    @Override
    public State<String> getResultingState() {
        LOGGER.trace("Entering getResultingState()");
        // Parsed lazily and cached; the resource name never changes for a mutation instance.
        if (version == null)
            version = parseVersion(getResourceName());
        LOGGER.trace("Leaving getResultingState() : {}", version);
        return version;
    }

    /**
     * Override to get the name of resource.
     *
     */
    public abstract String getResourceName();

    /**
     * Performs the actual mutation and then updates the recorded schema version.
     * The version record is appended (with SUCCESS or FAILED status) even when the
     * mutation throws, and the original exception is rethrown afterwards.
     */
    @Override
    public final void mutate(Context context) {
        LOGGER.trace("Entering mutate(context={})", context);
        RuntimeException mutateException = null;
        // Perform the mutation
        boolean success = true;
        long startTime = System.currentTimeMillis();
        try {
            LOGGER.trace("Entering performMutation(context={})", context);
            performMutation(context);
            LOGGER.trace("Leaving performMutation()");
        } catch (RuntimeException e) {
            // Remember the failure; the attempt is still recorded below before rethrowing.
            success = false;
            mutateException = e;
        }
        long endTime = System.currentTimeMillis();
        long execution_time = endTime - startTime;
        String version = getResultingState().getID();
        // calculate the checksum
        String checksum = getChecksum();
        // append version record
        if (!isIgnoreDB()) {
            DBUtils.appendVersionRecord(session, version, getResourceName(), checksum, (int) execution_time,
                    (success ? MutationStatus.SUCCESS.getValue() : MutationStatus.FAILED.getValue()));
        }
        if (mutateException != null) {
            throw mutateException;
        }
        LOGGER.trace("Leaving mutate()");
    }

    /**
     *
     * @return the MD5 hash of the current mutation
     */
    public abstract String getChecksum();

    /**
     * Generate the MD5 hash of a stream's contents.
     * The caller retains ownership of the stream and is responsible for closing it.
     *
     * @param inputStream
     *            the stream whose contents are hashed.
     * @return
     *         the MD5 hash of the stream contents.
     */
    public static byte[] md5(InputStream inputStream) {
        MessageDigest algorithm;
        try {
            algorithm = MessageDigest.getInstance("MD5");
        } catch (NoSuchAlgorithmException ex) {
            // MD5 is required to be supported by every conforming JRE.
            throw new RuntimeException(ex);
        }
        DigestInputStream digestInputStream = new DigestInputStream(inputStream, algorithm);
        // Drain the stream through the digest in buffered chunks; the previous
        // implementation read one byte per read() call, which is O(n) I/O calls.
        byte[] buffer = new byte[8192];
        try {
            while (digestInputStream.read(buffer) != -1) {
                // Data is consumed solely to update the digest.
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        // Intentionally not closing digestInputStream: doing so would close the
        // caller-supplied inputStream, which the caller owns.
        return algorithm.digest();
    }

    /**
     * Compute the MD5 hash of a stream's contents in hexadecimal format.
     *
     * @param inputStream
     *            the inputStream to be hashed.
     * @return
     *         the hexadecimal format of hash of a key.
     */
    public static String md5String(InputStream inputStream) {
        byte[] messageDigest = md5(inputStream);
        return toHex(messageDigest);
    }

    /**
     * Encode a byte array as a hexadecimal string
     *
     * @param bytes
     *            byte array
     * @return
     *         hexadecimal format for the byte array
     */
    public static String toHex(byte[] bytes) {
        StringBuilder hexString = new StringBuilder();
        for (byte aByte : bytes) {
            String hex = Integer.toHexString(0xFF & aByte);
            if (hex.length() == 1) {
                // Pad single-digit values so every byte renders as two hex characters.
                hexString.append('0');
            }
            hexString.append(hex);
        }
        return hexString.toString();
    }

    /**
     * A getter method for session.
     */
    protected Session getSession() {
        return session;
    }

    public void setSession(Session session) {
        this.session = session;
    }

    public boolean isIgnoreDB() {
        return ignoreDB;
    }

    public void setIgnoreDB(boolean ignoreDB) {
        this.ignoreDB = ignoreDB;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tika.bundle;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.ops4j.pax.exam.CoreOptions.bundle;
import static org.ops4j.pax.exam.CoreOptions.junitBundles;
import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
import static org.ops4j.pax.exam.CoreOptions.options;
import static org.ops4j.pax.exam.CoreOptions.systemProperty;
import javax.inject.Inject;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.io.Writer;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.jar.Attributes;
import java.util.jar.JarInputStream;
import java.util.jar.Manifest;
import org.apache.tika.Tika;
import org.apache.tika.detect.DefaultDetector;
import org.apache.tika.detect.Detector;
import org.apache.tika.fork.ForkParser;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.mime.MediaType;
import org.apache.tika.parser.CompositeParser;
import org.apache.tika.parser.DefaultParser;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.parser.Parser;
import org.apache.tika.parser.internal.Activator;
import org.apache.tika.parser.ocr.TesseractOCRParser;
import org.apache.tika.sax.BodyContentHandler;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.Configuration;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.PaxExam;
import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy;
import org.ops4j.pax.exam.spi.reactors.PerMethod;
import org.ops4j.pax.exam.util.PathUtils;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.xml.sax.ContentHandler;
/**
 * Pax Exam integration tests for the Tika OSGi bundle: verifies bundle
 * activation, manifest hygiene, type detection, parsing (including forked
 * parsing and OCR), and parity between the parsers/detectors published in
 * the OSGi service registry and those discovered via plain service loading.
 */
@RunWith(PaxExam.class)
@ExamReactorStrategy(PerMethod.class)
public class BundleIT {

    /** Parser injected from the OSGi service registry. */
    @Inject
    private Parser defaultParser;

    /** Detector injected from the OSGi service registry. */
    @Inject
    private Detector contentTypeDetector;

    /** Bundle context of the test probe. */
    @Inject
    private BundleContext bc;

    private String log4jConfigPath = "file:" + PathUtils.getBaseDir() + "/src/test/resources/log4j.properties";
    private String testBundlesPath = "file:" + PathUtils.getBaseDir() + "/target/test-bundles/";

    /**
     * Provisions the container with the Tika bundles under test plus the
     * SLF4J/log4j logging stack.
     */
    @Configuration
    public Option[] configuration() throws IOException, URISyntaxException, ClassNotFoundException {
        return options(
                bundle(testBundlesPath + "tika-core.jar"),
                bundle(testBundlesPath + "tika-bundle.jar"),
                junitBundles(),
                mavenBundle("org.slf4j", "slf4j-api", "1.7.24"),
                mavenBundle("org.slf4j", "slf4j-log4j12", "1.7.24").noStart(),
                mavenBundle("org.slf4j", "jcl-over-slf4j", "1.7.24"),
                mavenBundle("org.slf4j", "jul-to-slf4j", "1.7.24"),
                mavenBundle("log4j", "log4j", "1.2.17"),
                systemProperty("log4j.configuration").value(log4jConfigPath)
        );
    }

    /** Both Tika bundles must be present and in the ACTIVE state. */
    @Test
    public void testBundleLoaded() throws Exception {
        boolean hasCore = false, hasBundle = false;
        for (Bundle b : bc.getBundles()) {
            if ("org.apache.tika.core".equals(b.getSymbolicName())) {
                hasCore = true;
                assertEquals("Core not activated", Bundle.ACTIVE, b.getState());
            }
            if ("org.apache.tika.bundle".equals(b.getSymbolicName())) {
                hasBundle = true;
                assertEquals("Bundle not activated", Bundle.ACTIVE, b.getState());
            }
        }
        assertTrue("Core bundle not found", hasCore);
        assertTrue("Bundle bundle not found", hasBundle);
    }

    /** The bundle manifest must not import any junit package. */
    @Test
    public void testManifestNoJUnit() throws Exception {
        File targetDir = new File("target");
        File base = new File(targetDir, "test-bundles");
        File tikaBundle = new File(base, "tika-bundle.jar");
        // FIX: the stream was previously never closed, leaking a file handle.
        try (JarInputStream jarIs = new JarInputStream(new FileInputStream(tikaBundle))) {
            Manifest mf = jarIs.getManifest();
            Attributes main = mf.getMainAttributes();
            String importPackage = main.getValue("Import-Package");
            boolean containsJunit = importPackage.contains("junit");
            assertFalse("The bundle should not import junit", containsJunit);
        }
    }

    /** Name-based detection must work through the injected Detector. */
    @Test
    public void testBundleDetection() throws Exception {
        Metadata metadataTXT = new Metadata();
        metadataTXT.set(Metadata.RESOURCE_NAME_KEY, "test.txt");
        Metadata metadataPDF = new Metadata();
        metadataPDF.set(Metadata.RESOURCE_NAME_KEY, "test.pdf");
        // Simple type detection
        assertEquals(MediaType.TEXT_PLAIN, contentTypeDetector.detect(null, metadataTXT));
        assertEquals(MediaType.application("pdf"), contentTypeDetector.detect(null, metadataPDF));
    }

    /** Parsing in a forked JVM must produce the same text extraction. */
    @Test
    public void testForkParser() throws Exception {
        ForkParser parser = new ForkParser(Activator.class.getClassLoader(), defaultParser);
        try {
            parser.setJavaCommand(Arrays.asList("java", "-Xmx32m", "-Dlog4j.configuration=" + log4jConfigPath));
            String data = "<!DOCTYPE html>\n<html><body><p>test <span>content</span></p></body></html>";
            InputStream stream = new ByteArrayInputStream(data.getBytes(UTF_8));
            Writer writer = new StringWriter();
            ContentHandler contentHandler = new BodyContentHandler(writer);
            Metadata metadata = new Metadata();
            MediaType type = contentTypeDetector.detect(stream, metadata);
            // FIX: expected value goes first in the JUnit assertEquals contract.
            assertEquals("text/html", type.toString());
            metadata.add(Metadata.CONTENT_TYPE, type.toString());
            ParseContext parseCtx = new ParseContext();
            parser.parse(stream, contentHandler, metadata, parseCtx);
            writer.flush();
            String content = writer.toString();
            assertTrue(content.length() > 0);
            assertEquals("test content", content.trim());
        } finally {
            // FIX: shut down the forked JVM even when an assertion fails;
            // previously the parser was never closed.
            parser.close();
        }
    }

    /** Facade-level text extraction must work inside OSGi. */
    @Test
    public void testBundleSimpleText() throws Exception {
        Tika tika = new Tika();
        // Simple text extraction
        String xml = tika.parseToString(new File("pom.xml"));
        assertTrue(xml.contains("tika-bundle"));
    }

    @Test
    public void testBundleDetectors() throws Exception {
        //For some reason, the detector created by OSGi has a flat
        //list of detectors, whereas the detector created by the traditional
        //service loading method has children: DefaultDetector, MimeTypes.
        //We have to flatten the service loaded DefaultDetector to get equivalence.
        //Detection behavior should all be the same.
        // Get the classes found within OSGi
        ServiceReference<Detector> detectorRef = bc.getServiceReference(Detector.class);
        DefaultDetector detectorService = (DefaultDetector) bc.getService(detectorRef);
        Set<String> osgiDetectors = new HashSet<>();
        for (Detector d : detectorService.getDetectors()) {
            osgiDetectors.add(d.getClass().getName());
        }
        // Check we did get a few, just in case...
        assertTrue("Should have several Detector names, found " + osgiDetectors.size(),
                osgiDetectors.size() > 3);
        // Get the raw detectors list from the traditional service loading mechanism
        DefaultDetector detector = new DefaultDetector();
        Set<String> rawDetectors = new HashSet<String>();
        for (Detector d : detector.getDetectors()) {
            if (d instanceof DefaultDetector) {
                for (Detector dChild : ((DefaultDetector) d).getDetectors()) {
                    rawDetectors.add(dChild.getClass().getName());
                }
            } else {
                rawDetectors.add(d.getClass().getName());
            }
        }
        assertEquals(osgiDetectors, rawDetectors);
    }

    @Test
    public void testBundleParsers() throws Exception {
        // Get the classes found within OSGi
        ServiceReference<Parser> parserRef = bc.getServiceReference(Parser.class);
        DefaultParser parserService = (DefaultParser) bc.getService(parserRef);
        Set<String> osgiParsers = new HashSet<>();
        for (Parser p : parserService.getAllComponentParsers()) {
            osgiParsers.add(p.getClass().getName());
        }
        // Check we did get a few, just in case...
        assertTrue("Should have lots Parser names, found " + osgiParsers.size(),
                osgiParsers.size() > 15);
        // Get the raw parsers list from the traditional service loading mechanism
        CompositeParser parser = (CompositeParser) defaultParser;
        Set<String> rawParsers = new HashSet<>();
        for (Parser p : parser.getAllComponentParsers()) {
            if (p instanceof DefaultParser) {
                // Flatten the nested DefaultParser for parity with the OSGi list.
                for (Parser pChild : ((DefaultParser) p).getAllComponentParsers()) {
                    rawParsers.add(pChild.getClass().getName());
                }
            } else {
                rawParsers.add(p.getClass().getName());
            }
        }
        assertEquals(rawParsers, osgiParsers);
    }

    /** Smoke test: the Tesseract OCR parser must run without throwing. */
    @Test
    public void testTesseractParser() throws Exception {
        ContentHandler handler = new BodyContentHandler();
        ParseContext context = new ParseContext();
        Parser tesseractParser = new TesseractOCRParser();
        try (InputStream stream = new FileInputStream("src/test/resources/testOCR.jpg")) {
            tesseractParser.parse(stream, handler, new Metadata(), context);
        }
    }

    /** Recursive package extraction must surface content of every entry. */
    @Test
    public void testTikaBundle() throws Exception {
        Tika tika = new Tika();
        // Package extraction
        ContentHandler handler = new BodyContentHandler();
        Parser parser = tika.getParser();
        ParseContext context = new ParseContext();
        // Register the parser so entries inside the zip are parsed recursively.
        context.set(Parser.class, parser);
        try (InputStream stream =
                new FileInputStream("src/test/resources/test-documents.zip")) {
            parser.parse(stream, handler, new Metadata(), context);
        }
        String content = handler.toString();
        assertTrue(content.contains("testEXCEL.xls"));
        assertTrue(content.contains("Sample Excel Worksheet"));
        assertTrue(content.contains("testHTML.html"));
        assertTrue(content.contains("Test Indexation Html"));
        assertTrue(content.contains("testOpenOffice2.odt"));
        assertTrue(content.contains("This is a sample Open Office document"));
        assertTrue(content.contains("testPDF.pdf"));
        assertTrue(content.contains("Apache Tika"));
        assertTrue(content.contains("testPPT.ppt"));
        assertTrue(content.contains("Sample Powerpoint Slide"));
        assertTrue(content.contains("testRTF.rtf"));
        assertTrue(content.contains("indexation Word"));
        assertTrue(content.contains("testTXT.txt"));
        assertTrue(content.contains("Test d'indexation de Txt"));
        assertTrue(content.contains("testWORD.doc"));
        assertTrue(content.contains("This is a sample Microsoft Word Document"));
        assertTrue(content.contains("testXML.xml"));
        assertTrue(content.contains("Rida Benjelloun"));
    }
}
| |
package fr.synchrotron.soleil.ica.ci.lib.mongodb.domainobjects.artifact;
import fr.synchrotron.soleil.ica.ci.lib.mongodb.domainobjects.artifact.ext.ArtifactDocumentForC;
import fr.synchrotron.soleil.ica.ci.lib.mongodb.domainobjects.artifact.ext.ArtifactDocumentForJava;
import fr.synchrotron.soleil.ica.ci.lib.mongodb.domainobjects.artifact.ext.BuildContext;
import java.util.Date;
import java.util.List;
/**
 * Bean describing one artifact document stored in the MongoDB
 * {@code artifacts} collection: identification coordinates (org/name/
 * version/status), checksums, file facts, and optional per-language
 * extension data. Plain mutable JavaBean — not thread-safe.
 *
 * @author Gregory Boissinot
 */
public class ArtifactDocument {
// Name of the MongoDB collection that stores these documents.
public static final String MONGO_ARTIFACTS_COLLECTION_NAME = "artifacts";
// Identification coordinates; together they form the document key (see getKey()).
private String org;
private String name;
private String version;
private String status;
// Artifact type; NOTE(review): allowed values not visible here — confirm with callers.
private String type;
// True when the artifact comes from a third party rather than being built in-house.
private boolean thirdParty;
private Date creationDate;
private Date publicationDate;
// Checksums of the artifact file.
private String sha1;
private String md5;
private String description;
private String fileExtension;
private Long fileSize;
// NOTE(review): "force" semantics not visible in this file — presumably a
// force-publish/overwrite flag; confirm against the publishing code.
private boolean force;
// Optional language-specific extension blocks (either may be null).
private ArtifactDocumentForJava javaLanguage;
private ArtifactDocumentForC cLanguage;
private BuildContext buildContext;
// Module names associated with this artifact (may be null).
private List<String> modules;
// No-arg constructor required for mapping frameworks / serialization.
public ArtifactDocument() {
}
// Convenience constructor setting the four key coordinates.
public ArtifactDocument(String org, String name, String version, String status) {
this.org = org;
this.name = name;
this.version = version;
this.status = status;
}
// Builds the composite key identifying this document.
public ArtifactDocumentKey getKey() {
return new ArtifactDocumentKey(org, name, version, status);
}
public String getOrg() {
return org;
}
public void setOrg(String org) {
this.org = org;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public boolean isThirdParty() {
return thirdParty;
}
public void setThirdParty(boolean thirdParty) {
this.thirdParty = thirdParty;
}
public Date getCreationDate() {
return creationDate;
}
public void setCreationDate(Date creationDate) {
this.creationDate = creationDate;
}
public Date getPublicationDate() {
return publicationDate;
}
public void setPublicationDate(Date publicationDate) {
this.publicationDate = publicationDate;
}
public String getSha1() {
return sha1;
}
public void setSha1(String sha1) {
this.sha1 = sha1;
}
public String getMd5() {
return md5;
}
public void setMd5(String md5) {
this.md5 = md5;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getFileExtension() {
return fileExtension;
}
public void setFileExtension(String fileExtension) {
this.fileExtension = fileExtension;
}
public Long getFileSize() {
return fileSize;
}
public void setFileSize(Long fileSize) {
this.fileSize = fileSize;
}
public boolean isForce() {
return force;
}
public void setForce(boolean force) {
this.force = force;
}
public ArtifactDocumentForJava getJavaLanguage() {
return javaLanguage;
}
public void setJavaLanguage(ArtifactDocumentForJava javaLanguage) {
this.javaLanguage = javaLanguage;
}
public ArtifactDocumentForC getcLanguage() {
return cLanguage;
}
public void setcLanguage(ArtifactDocumentForC cLanguage) {
this.cLanguage = cLanguage;
}
public BuildContext getBuildContext() {
return buildContext;
}
public void setBuildContext(BuildContext buildContext) {
this.buildContext = buildContext;
}
// Returns the internal list directly; callers share (and can mutate) it.
public List<String> getModules() {
return modules;
}
public void setModules(List<String> modules) {
this.modules = modules;
}
// Debug representation of every field; the "isForce" label matches the
// accessor name rather than the field name.
@Override
public String toString() {
return "ArtifactDocument{" +
"org='" + org + '\'' +
", name='" + name + '\'' +
", version='" + version + '\'' +
", status='" + status + '\'' +
", type='" + type + '\'' +
", thirdParty=" + thirdParty +
", creationDate=" + creationDate +
", publicationDate=" + publicationDate +
", sha1='" + sha1 + '\'' +
", md5='" + md5 + '\'' +
", description='" + description + '\'' +
", fileExtension='" + fileExtension + '\'' +
", fileSize=" + fileSize +
", isForce=" + force +
", javaLanguage=" + javaLanguage +
", cLanguage=" + cLanguage +
", buildContext=" + buildContext +
", modules=" + modules +
'}';
}
}
| |
/*
* @(#)DayTimeDurationAttribute.java
*
* Copyright 2003-2004 Sun Microsystems, Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistribution of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any kind. ALL
* EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING
* ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE
* OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN")
* AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE
* AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
* DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR ANY LOST
* REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL,
* INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY
* OF LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that this software is not designed or intended for use in
* the design, construction, operation or maintenance of any nuclear facility.
*/
package com.sun.xacml.attr;
import com.sun.xacml.ParsingException;
import java.math.BigInteger;
import java.net.URI;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import java.util.regex.Matcher;
import org.w3c.dom.Node;
/**
 * Representation of an xf:dayTimeDuration value. This class supports parsing
 * xd:dayTimeDuration values. All objects of this class are immutable and
 * thread-safe. The <code>Date</code> objects returned are not, but
 * these objects are cloned before being returned.
 *
 * @since 1.0
 * @author Steve Hanna
 */
public class DayTimeDurationAttribute extends AttributeValue
{
    /**
     * Official name of this type
     */
    public static final String identifier =
        "http://www.w3.org/TR/2002/WD-xquery-operators-20020816#" +
        "dayTimeDuration";

    /**
     * URI version of name for this type
     * <p>
     * This field is initialized by a static initializer so that
     * we can catch any exceptions thrown by URI(String) and
     * transform them into a RuntimeException, since this should
     * never happen but should be reported properly if it ever does.
     */
    private static URI identifierURI;

    /**
     * RuntimeException that wraps an Exception thrown during the
     * creation of identifierURI, null if none.
     */
    private static RuntimeException earlyException;

    /**
     * Static initializer that initializes the identifierURI
     * class field so that we can catch any exceptions thrown
     * by URI(String) and transform them into a RuntimeException.
     * Such exceptions should never happen but should be reported
     * properly if they ever do.
     */
    static {
        try {
            identifierURI = new URI(identifier);
        } catch (Exception e) {
            earlyException = new IllegalArgumentException();
            earlyException.initCause(e);
        }
    }

    /**
     * Regular expression for dayTimeDuration (a la java.util.regex).
     * FIX: the decimal point before the fractional seconds is escaped
     * ("\\."). The previous bare '.' matched any character, so malformed
     * values such as "PT1X5S" were accepted as if they contained a
     * fractional-second separator.
     */
    private static final String patternString =
        "(\\-)?P((\\d+)?D)?(T((\\d+)?H)?((\\d+)?M)?((\\d+)?(\\.(\\d+)?)?S)?)?";

    /**
     * The index of the capturing group for the negative sign.
     */
    private static final int GROUP_SIGN = 1;

    /**
     * The index of the capturing group for the number of days.
     */
    private static final int GROUP_DAYS = 3;

    /**
     * The index of the capturing group for the number of hours.
     */
    private static final int GROUP_HOURS = 6;

    /**
     * The index of the capturing group for the number of minutes.
     */
    private static final int GROUP_MINUTES = 8;

    /**
     * The index of the capturing group for the number of seconds.
     */
    private static final int GROUP_SECONDS = 10;

    /**
     * The index of the capturing group for the number of nanoseconds.
     */
    private static final int GROUP_NANOSECONDS = 12;

    /**
     * A shared Pattern object. Compiled exactly once at class-initialization
     * time: Pattern instances are immutable and thread-safe, and eager
     * compilation removes the data race of the previous lazy initialization.
     * The pattern is a compile-time constant, so PatternSyntaxException
     * cannot occur here in practice.
     */
    private static final Pattern pattern = Pattern.compile(patternString);

    /**
     * Static BigInteger values. We only use these if one of
     * the components is bigger than Integer.MAX_VALUE and we
     * want to detect overflow, so we don't initialize these
     * until they're needed.
     */
    private static BigInteger big24;
    private static BigInteger big60;
    private static BigInteger big1000;
    private static BigInteger bigMaxLong;

    /**
     * Negative flag. true if duration is negative, false otherwise
     */
    private final boolean negative;

    /**
     * Number of days
     */
    private final long days;

    /**
     * Number of hours
     */
    private final long hours;

    /**
     * Number of minutes
     */
    private final long minutes;

    /**
     * Number of seconds
     */
    private final long seconds;

    /**
     * Number of nanoseconds
     */
    private final int nanoseconds;

    /**
     * Total number of round seconds (in milliseconds)
     */
    private final long totalMillis;

    /**
     * Cached encoded value (null if not cached yet). Benign race: encode()
     * is idempotent, so concurrent callers at worst compute it twice.
     */
    private String encodedValue = null;

    /**
     * Creates a new <code>DayTimeDurationAttribute</code> that represents
     * the duration supplied.
     *
     * @param negative true if the duration is negative, false otherwise
     * @param days the number of days in the duration
     * @param hours the number of hours in the duration
     * @param minutes the number of minutes in the duration
     * @param seconds the number of seconds in the duration
     * @param nanoseconds the number of nanoseconds in the duration
     * @throws IllegalArgumentException if the total number of milliseconds
     *                                  exceeds Long.MAX_VALUE
     */
    public DayTimeDurationAttribute(boolean negative, long days, long hours,
                                    long minutes, long seconds,
                                    int nanoseconds)
        throws IllegalArgumentException {
        super(identifierURI);

        // Shouldn't happen, but just in case...
        if (earlyException != null)
            throw earlyException;

        this.negative = negative;
        this.days = days;
        this.hours = hours;
        this.minutes = minutes;
        this.seconds = seconds;
        this.nanoseconds = nanoseconds;

        // Convert all the components except nanoseconds to milliseconds.
        // If any of the components is big (too big to be an int),
        // use the BigInteger class to do the math so we can detect overflow.
        if ((days > Integer.MAX_VALUE) || (hours > Integer.MAX_VALUE) ||
            (minutes > Integer.MAX_VALUE) || (seconds > Integer.MAX_VALUE)) {
            if (big24 == null) {
                big24 = BigInteger.valueOf(24);
                big60 = BigInteger.valueOf(60);
                big1000 = BigInteger.valueOf(1000);
                bigMaxLong = BigInteger.valueOf(Long.MAX_VALUE);
            }
            BigInteger bigDays = BigInteger.valueOf(days);
            BigInteger bigHours = BigInteger.valueOf(hours);
            BigInteger bigMinutes = BigInteger.valueOf(minutes);
            BigInteger bigSeconds = BigInteger.valueOf(seconds);

            BigInteger bigTotal = bigDays.multiply(big24).add(bigHours)
                .multiply(big60).add(bigMinutes).multiply(big60)
                .add(bigSeconds).multiply(big1000);

            // If the result is bigger than Long.MAX_VALUE, we have an
            // overflow. Indicate an error (should be a processing error,
            // since it can be argued that we should handle gigantic
            // values for this).
            // FIX: compareTo only guarantees the SIGN of its result, not the
            // magnitude, so test "> 0" instead of "== 1".
            if (bigTotal.compareTo(bigMaxLong) > 0)
                throw new IllegalArgumentException("total number of " +
                                                   "milliseconds " +
                                                   "exceeds Long.MAX_VALUE");
            // If no overflow, convert to a long.
            totalMillis = bigTotal.longValue();
        } else {
            // The numbers are small, so do it the fast way.
            totalMillis = ((((((days * 24) + hours) * 60) + minutes) * 60) +
                           seconds) * 1000;
        }
    }

    /**
     * Returns a new <code>DayTimeDurationAttribute</code> that represents
     * the xf:dayTimeDuration at a particular DOM node.
     *
     * @param root the <code>Node</code> that contains the desired value
     * @return a new <code>DayTimeDurationAttribute</code> representing the
     *         appropriate value (null if there is a parsing error)
     */
    public static DayTimeDurationAttribute getInstance(Node root)
        throws ParsingException, NumberFormatException
    {
        return getInstance(root.getFirstChild().getNodeValue());
    }

    /**
     * Returns the long value for the capturing group groupNumber.
     * This method takes a Matcher that has been used to match a
     * Pattern against a String, fetches the value for the specified
     * capturing group, converts that value to an long, and returns
     * the value. If that group did not match, 0 is returned.
     * If the matched value is not a valid long, NumberFormatException
     * is thrown.
     *
     * @param matcher the Matcher from which to fetch the group
     * @param groupNumber the group number to fetch
     * @return the long value for that groupNumber
     * @throws NumberFormatException if the string value for that
     *                               groupNumber is not a valid long
     */
    private static long parseGroup(Matcher matcher, int groupNumber)
        throws NumberFormatException {
        long groupLong = 0;

        if (matcher.start(groupNumber) != -1) {
            String groupString = matcher.group(groupNumber);
            groupLong = Long.parseLong(groupString);
        }
        return groupLong;
    }

    /**
     * Returns a new <code>DayTimeDurationAttribute</code> that represents
     * the xf:dayTimeDuration value indicated by the string provided.
     *
     * @param value a string representing the desired value
     * @return a new <code>DayTimeDurationAttribute</code> representing the
     *         desired value
     * @throws ParsingException if the string is not a valid dayTimeDuration
     * @throws NumberFormatException if a component is too big to be a long
     */
    public static DayTimeDurationAttribute getInstance(String value)
        throws ParsingException, NumberFormatException
    {
        boolean negative = false;
        long days = 0;
        long hours = 0;
        long minutes = 0;
        long seconds = 0;
        int nanoseconds = 0;

        // See if the value matches the pattern. If not, syntax error!
        Matcher matcher = pattern.matcher(value);
        if (!matcher.matches()) {
            throw new ParsingException("Syntax error in dayTimeDuration");
        }

        // If the negative group matched, the value is negative.
        if (matcher.start(GROUP_SIGN) != -1)
            negative = true;

        // Parse each component that matched. A NumberFormatException from
        // any of these (a number too big to be a long) propagates to the
        // caller as declared; the previous catch-and-rethrow was redundant.
        days = parseGroup(matcher, GROUP_DAYS);
        hours = parseGroup(matcher, GROUP_HOURS);
        minutes = parseGroup(matcher, GROUP_MINUTES);
        seconds = parseGroup(matcher, GROUP_SECONDS);

        // Special handling for fractional seconds, since
        // they can have any resolution.
        if (matcher.start(GROUP_NANOSECONDS) != -1) {
            String nanosecondString = matcher.group(GROUP_NANOSECONDS);

            // If there are less than 9 digits in the fractional seconds,
            // pad with zeros on the right so it's nanoseconds.
            while (nanosecondString.length() < 9)
                nanosecondString += "0";

            // If there are more than 9 digits in the fractional seconds,
            // drop the least significant digits.
            if (nanosecondString.length() > 9) {
                nanosecondString = nanosecondString.substring(0, 9);
            }

            nanoseconds = Integer.parseInt(nanosecondString);
        }

        // Here's a requirement that's not checked for in the pattern.
        // The designator 'T' must be absent if all the time
        // items are absent. So the string can't end in 'T'.
        // Note that we don't have to worry about a zero length
        // string, since the pattern won't allow that.
        if (value.charAt(value.length()-1) == 'T')
            throw new ParsingException("'T' must be absent if all" +
                                       "time items are absent");

        // If parsing went OK, create a new DayTimeDurationAttribute object
        // and return it.
        return new DayTimeDurationAttribute(negative, days, hours, minutes,
                                            seconds, nanoseconds);
    }

    /**
     * Returns true if the duration is negative.
     *
     * @return true if the duration is negative, false otherwise
     */
    public boolean isNegative() {
        return negative;
    }

    /**
     * Gets the number of days.
     *
     * @return the number of days
     */
    public long getDays() {
        return days;
    }

    /**
     * Gets the number of hours.
     *
     * @return the number of hours
     */
    public long getHours() {
        return hours;
    }

    /**
     * Gets the number of minutes.
     *
     * @return the number of minutes
     */
    public long getMinutes() {
        return minutes;
    }

    /**
     * Gets the number of seconds.
     *
     * @return the number of seconds
     */
    public long getSeconds() {
        return seconds;
    }

    /**
     * Gets the number of nanoseconds.
     *
     * @return the number of nanoseconds
     */
    public int getNanoseconds() {
        return nanoseconds;
    }

    /**
     * Gets the total number of round seconds (in milliseconds).
     *
     * @return the total number of seconds (in milliseconds)
     */
    public long getTotalSeconds() {
        return totalMillis;
    }

    /**
     * Returns true if the input is an instance of this class and if its
     * value equals the value contained in this class.
     *
     * @param o the object to compare
     * @return true if this object and the input represent the same value
     */
    public boolean equals(Object o) {
        if (! (o instanceof DayTimeDurationAttribute))
            return false;

        DayTimeDurationAttribute other = (DayTimeDurationAttribute)o;

        return ((totalMillis == other.totalMillis) &&
                (nanoseconds == other.nanoseconds) &&
                (negative == other.negative));
    }

    /**
     * Returns the hashcode value used to index and compare this object with
     * others of the same type.
     *
     * @return the object's hashcode value
     */
    public int hashCode() {
        // The totalMillis, nanoseconds, and negative fields are all considered
        // by the equals method, so the hashCode is derived from all of them.
        int hashCode = (int) totalMillis ^ (int) (totalMillis >> 32);
        hashCode = 31*hashCode + nanoseconds;
        if (negative)
            hashCode = -hashCode;
        return hashCode;
    }

    /**
     * Converts to a String representation.
     *
     * @return the String representation
     */
    public String toString() {
        StringBuffer sb = new StringBuffer();

        sb.append("DayTimeDurationAttribute: [\n");
        sb.append("  Negative: " + negative);
        sb.append("  Days: " + days);
        sb.append("  Hours: " + hours);
        sb.append("  Minutes: " + minutes);
        sb.append("  Seconds: " + seconds);
        sb.append("  Nanoseconds: " + nanoseconds);
        sb.append("  TotalSeconds: " + totalMillis);
        sb.append("]");

        return sb.toString();
    }

    /**
     * Encodes the value in a form suitable for including in XML data like
     * a request or an obligation. This must return a value that could in
     * turn be used by the factory to create a new instance with the same
     * value.
     *
     * @return a <code>String</code> form of the value
     */
    public String encode() {
        if (encodedValue != null)
            return encodedValue;

        // Length is quite variable
        StringBuffer buf = new StringBuffer(10);

        if (negative)
            buf.append('-');
        buf.append('P');
        if (days != 0) {
            buf.append(Long.toString(days));
            buf.append('D');
        }
        if ((hours != 0) || (minutes != 0)
            || (seconds != 0) || (nanoseconds != 0)) {
            // Only include the T if there are some time fields
            buf.append('T');
        } else {
            // Make sure that there's always at least one field specified
            if (days == 0)
                buf.append("0D");
        }
        if (hours != 0) {
            buf.append(Long.toString(hours));
            buf.append('H');
        }
        if (minutes != 0) {
            buf.append(Long.toString(minutes));
            buf.append('M');
        }
        if ((seconds != 0) || (nanoseconds != 0)) {
            buf.append(Long.toString(seconds));
            if (nanoseconds != 0) {
                buf.append('.');
                buf.append(DateAttribute.zeroPadInt(nanoseconds, 9));
            }
            buf.append('S');
        }

        encodedValue = buf.toString();
        return encodedValue;
    }
}
| |
/*
* Copyright 2011 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.custommapsapp.android;
import com.custommapsapp.android.kml.GroundOverlay;
import com.custommapsapp.android.kml.KmlFile;
import com.custommapsapp.android.kml.KmlInfo;
import com.custommapsapp.android.kml.KmlParser;
import com.custommapsapp.android.kml.KmzFile;
import android.util.Log;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* MapCatalog keeps track of maps (GroundOverlays) stored in a directory.
*
* @author Marko Teittinen
*/
public class MapCatalog {
private static final String LOG_TAG = "Custom Maps";
private File dataDir;
private List<GroundOverlay> allMaps = new ArrayList<GroundOverlay>();
private List<GroundOverlay> inMaps = new ArrayList<GroundOverlay>();
private List<GroundOverlay> nearMaps = new ArrayList<GroundOverlay>();
private List<GroundOverlay> farMaps = new ArrayList<GroundOverlay>();
/**
 * Creates a new MapCatalog that contains all maps in a directory.
 *
 * @param dataDir directory that holds the maps of this catalog
 */
public MapCatalog(File dataDir) {
this.dataDir = dataDir;
// Scan dataDir right away so the map lists are populated on construction
// (refreshCatalog is defined elsewhere in this class).
refreshCatalog();
}
/**
 * Parses the given KML/KMZ file and returns a map from there.
 *
 * @param mapFile containing GroundOverlay definitions
 * @return GroundOverlay from the file, or {@code null} if none found
 */
public GroundOverlay parseLocalFile(File mapFile) {
    KmlParser parser = new KmlParser();
    // Scan the KML data in the same directory for an entry with a
    // matching file name.
    for (KmlInfo candidate : findKmlData(mapFile.getParentFile())) {
        if (!candidate.getFile().getName().equals(mapFile.getName())) {
            continue;
        }
        // Found the requested file, attempt to parse it.
        try {
            Iterator<GroundOverlay> overlays =
                parser.readFile(candidate.getKmlReader()).iterator();
            // Parsing succeeded: return the first map found, if any.
            if (overlays.hasNext()) {
                GroundOverlay firstMap = overlays.next();
                firstMap.setKmlInfo(candidate);
                return firstMap;
            }
        } catch (Exception ex) {
            // Best effort: log and keep scanning the remaining candidates.
            Log.w(LOG_TAG, "Failed to parse KML file: " + candidate.toString(), ex);
        }
    }
    // No GroundOverlays were found in the file
    return null;
}
/**
 * Checks if a map is part of multiple maps stored in single file.
 *
 * @param map GroundOverlay to be checked
 * @return {@code true} if this catalog contains another map stored in the
 *         same KML or KMZ file
 */
public boolean isPartOfMapSet(GroundOverlay map) {
    File sourceFile = map.getKmlInfo().getFile();
    for (GroundOverlay other : allMaps) {
        // Another (distinct) map backed by the same file means a map set.
        boolean storedInSameFile = sourceFile.equals(other.getKmlInfo().getFile());
        if (storedInSameFile && !map.equals(other)) {
            return true;
        }
    }
    return false;
}
/**
* @return Iterable<GroundOverlay> listing all maps in catalog alphabetically
*/
public Iterable<GroundOverlay> getAllMapsSortedByName() {
Collections.sort(allMaps, mapSorter);
return allMaps;
}
/**
* Sorts list of maps in alphabetical order by name
*/
public void sortMapsByName(List<GroundOverlay> maps) {
Collections.sort(maps, mapSorter);
}
/**
* Finds all maps that contain a location. This is slightly faster than
* calling groupMapsByDistance(longitude, latitude) followed by
* getLocalMaps(). Result is the same though.
*
* @param longitude of the location
* @param latitude of the location
* @return Iterable<GroundOverlay> of the maps that contain the location
*/
public Iterable<GroundOverlay> getMapsContainingPoint(float longitude, float latitude) {
List<GroundOverlay> result = new ArrayList<GroundOverlay>();
for (GroundOverlay map : allMaps) {
if (map.contains(longitude, latitude)) {
result.add(map);
}
}
return result;
}
/**
* Groups maps by distance from the given point. Maps will be divided to three
* groups: maps containing the point, maps near the point (< 50km), maps far
* from point.
*
* @param longitude of the location
* @param latitude of the location
*/
public void groupMapsByDistance(float longitude, float latitude) {
inMaps.clear();
nearMaps.clear();
farMaps.clear();
for (GroundOverlay map : allMaps) {
float distance = map.getDistanceFrom(longitude, latitude);
if (distance == 0f) {
inMaps.add(map);
} else if (distance < 50000f) {
nearMaps.add(map);
} else {
farMaps.add(map);
}
}
}
/**
* Note: Call groupMapsByDistance(longitude, latitude) first.
*
* @return Iterable<GroundOverlay> of maps containing the location
*/
public Iterable<GroundOverlay> getLocalMaps() {
return inMaps;
}
/**
* Note: Call groupMapsByDistance(longitude, latitude) first.
*
* @return Iterable<GroundOverlay> of maps within 50 km (30 mi) of the
* location
*/
public Iterable<GroundOverlay> getNearMaps() {
return nearMaps;
}
/**
* Note: Call groupMapsByDistance(longitude, latitude) first.
*
* @return Iterable<GroundOverlay> of maps farther than 50 km (30 mi) of the
* location
*/
public Iterable<GroundOverlay> getFarMaps() {
return farMaps;
}
/**
* Updates the contents of this catalog by re-reading files in data directory.
*/
public void refreshCatalog() {
allMaps.clear();
inMaps.clear();
nearMaps.clear();
farMaps.clear();
KmlParser parser = new KmlParser();
for (KmlInfo kmlInfo : findKmlData()) {
try {
Iterable<GroundOverlay> overlays = parser.readFile(kmlInfo.getKmlReader());
for (GroundOverlay overlay : overlays) {
overlay.setKmlInfo(kmlInfo);
allMaps.add(overlay);
}
} catch (Exception ex) {
Log.w(LOG_TAG, "Failed to parse KML file: " + kmlInfo.toString(), ex);
}
}
}
/**
* @return Iterable<KmlInfo> of all available KML and KMZ files
*/
private Iterable<KmlInfo> findKmlData() {
List<KmlInfo> kmlData = new ArrayList<KmlInfo>();
kmlData.addAll(findKmlData(dataDir));
return kmlData;
}
/**
* @return Collection<KmlInfo> of all KML and KMZ files in a directory
*/
private Collection<KmlInfo> findKmlData(File directory) {
List<KmlInfo> kmlData = new ArrayList<KmlInfo>();
if (directory == null || !directory.exists() || !directory.isDirectory()) {
return kmlData;
}
File[] files = directory.listFiles();
for (File file : files) {
if (file.getName().endsWith(".kml")) {
kmlData.add(new KmlFile(file));
} else if (file.getName().endsWith(".kmz")) {
ZipFile kmzFile;
try {
kmzFile = new ZipFile(file);
} catch (Exception ex) {
// TODO: Add a notification dialog (?)
Log.w(LOG_TAG, "Not a valid KMZ file: " + file.getName(), ex);
continue;
}
Enumeration<? extends ZipEntry> kmzContents = kmzFile.entries();
while (kmzContents.hasMoreElements()) {
ZipEntry kmzItem = kmzContents.nextElement();
if (kmzItem.getName().endsWith(".kml")) {
kmlData.add(new KmzFile(kmzFile, kmzItem));
}
}
}
}
return kmlData;
}
/**
* Compares GroundOverlays by name (case insensitive).
*/
private Comparator<GroundOverlay> mapSorter = new Comparator<GroundOverlay>() {
@Override
public int compare(GroundOverlay m1, GroundOverlay m2) {
return m1.getName().compareToIgnoreCase(m2.getName());
}
};
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.mbeans;
import java.util.ArrayList;
import javax.management.MBeanException;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.RuntimeOperationsException;
import org.apache.catalina.deploy.ContextEnvironment;
import org.apache.catalina.deploy.ContextResource;
import org.apache.catalina.deploy.ContextResourceLink;
import org.apache.catalina.deploy.NamingResources;
import org.apache.tomcat.util.modeler.BaseModelMBean;
import org.apache.tomcat.util.modeler.ManagedBean;
import org.apache.tomcat.util.modeler.Registry;
/**
* <p>A <strong>ModelMBean</strong> implementation for the
* <code>org.apache.catalina.deploy.NamingResources</code> component.</p>
*
* @author Amy Roh
*/
public class NamingResourcesMBean extends BaseModelMBean {

    // ----------------------------------------------------------- Constructors

    /**
     * Construct a <code>ModelMBean</code> with default
     * <code>ModelMBeanInfo</code> information.
     *
     * @exception MBeanException if the initializer of an object
     *  throws an exception
     * @exception RuntimeOperationsException if an IllegalArgumentException
     *  occurs
     */
    public NamingResourcesMBean()
            throws MBeanException, RuntimeOperationsException {
        super();
    }

    // ----------------------------------------------------- Instance Variables

    /**
     * The configuration information registry for our managed beans.
     */
    protected Registry registry = MBeanUtils.createRegistry();

    /**
     * The <code>ManagedBean</code> information describing this MBean.
     */
    protected ManagedBean managed = registry.findManagedBean("NamingResources");

    // ------------------------------------------------------------- Attributes

    /**
     * Return the MBean Names of the set of defined environment entries for
     * this web application.
     */
    public String[] getEnvironments() {
        ContextEnvironment[] envs =
                ((NamingResources) this.resource).findEnvironments();
        ArrayList<String> results = new ArrayList<String>();
        for (ContextEnvironment env : envs) {
            try {
                ObjectName oname =
                        MBeanUtils.createObjectName(managed.getDomain(), env);
                results.add(oname.toString());
            } catch (MalformedObjectNameException e) {
                // Chain the original cause so callers can diagnose the failure
                throw new IllegalArgumentException(
                        "Cannot create object name for environment " + env, e);
            }
        }
        return results.toArray(new String[0]);
    }

    /**
     * Return the MBean Names of all the defined resource references for this
     * application.
     */
    public String[] getResources() {
        ContextResource[] resources =
                ((NamingResources) this.resource).findResources();
        ArrayList<String> results = new ArrayList<String>();
        for (ContextResource contextResource : resources) {
            try {
                ObjectName oname =
                        MBeanUtils.createObjectName(managed.getDomain(), contextResource);
                results.add(oname.toString());
            } catch (MalformedObjectNameException e) {
                throw new IllegalArgumentException(
                        "Cannot create object name for resource " + contextResource, e);
            }
        }
        return results.toArray(new String[0]);
    }

    /**
     * Return the MBean Names of all the defined resource link references for
     * this application.
     */
    public String[] getResourceLinks() {
        ContextResourceLink[] resourceLinks =
                ((NamingResources) this.resource).findResourceLinks();
        ArrayList<String> results = new ArrayList<String>();
        for (ContextResourceLink resourceLink : resourceLinks) {
            try {
                ObjectName oname =
                        MBeanUtils.createObjectName(managed.getDomain(), resourceLink);
                results.add(oname.toString());
            } catch (MalformedObjectNameException e) {
                throw new IllegalArgumentException(
                        "Cannot create object name for resource " + resourceLink, e);
            }
        }
        return results.toArray(new String[0]);
    }

    // ------------------------------------------------------------- Operations

    /**
     * Add an environment entry for this web application.
     *
     * @param envName New environment entry name
     * @param type The type of the new environment entry
     * @param value The value of the new environment entry
     * @return the MBean name of the new entry, or {@code null} if the managed
     *         naming resources are not available
     */
    public String addEnvironment(String envName, String type, String value)
            throws MalformedObjectNameException {
        NamingResources nresources = (NamingResources) this.resource;
        if (nresources == null) {
            return null;
        }
        // Reject duplicate entries
        ContextEnvironment env = nresources.findEnvironment(envName);
        if (env != null) {
            throw new IllegalArgumentException(
                    "Invalid environment name - already exists '" + envName + "'");
        }
        env = new ContextEnvironment();
        env.setName(envName);
        env.setType(type);
        env.setValue(value);
        nresources.addEnvironment(env);
        // Return the corresponding MBean name. A distinctly named local is used
        // here so the "NamingResources" ManagedBean field is not shadowed.
        ManagedBean managedBean = registry.findManagedBean("ContextEnvironment");
        ObjectName oname = MBeanUtils.createObjectName(managedBean.getDomain(), env);
        return oname.toString();
    }

    /**
     * Add a resource reference for this web application.
     *
     * @param resourceName New resource reference name
     * @param type New resource reference type
     * @return the MBean name of the new reference, or {@code null} if the
     *         managed naming resources are not available
     */
    public String addResource(String resourceName, String type)
            throws MalformedObjectNameException {
        NamingResources nresources = (NamingResources) this.resource;
        if (nresources == null) {
            return null;
        }
        // Reject duplicate entries
        ContextResource resource = nresources.findResource(resourceName);
        if (resource != null) {
            throw new IllegalArgumentException(
                    "Invalid resource name - already exists '" + resourceName + "'");
        }
        resource = new ContextResource();
        resource.setName(resourceName);
        resource.setType(type);
        nresources.addResource(resource);
        // Return the corresponding MBean name
        ManagedBean managedBean = registry.findManagedBean("ContextResource");
        ObjectName oname = MBeanUtils.createObjectName(managedBean.getDomain(), resource);
        return oname.toString();
    }

    /**
     * Add a resource link reference for this web application.
     *
     * @param resourceLinkName New resource link reference name
     * @param type New resource link reference type
     * @return the MBean name of the new reference, or {@code null} if the
     *         managed naming resources are not available
     */
    public String addResourceLink(String resourceLinkName, String type)
            throws MalformedObjectNameException {
        NamingResources nresources = (NamingResources) this.resource;
        if (nresources == null) {
            return null;
        }
        // Reject duplicate entries
        ContextResourceLink resourceLink =
                nresources.findResourceLink(resourceLinkName);
        if (resourceLink != null) {
            throw new IllegalArgumentException(
                    "Invalid resource link name - already exists '" +
                    resourceLinkName + "'");
        }
        resourceLink = new ContextResourceLink();
        resourceLink.setName(resourceLinkName);
        resourceLink.setType(type);
        nresources.addResourceLink(resourceLink);
        // Return the corresponding MBean name
        ManagedBean managedBean = registry.findManagedBean("ContextResourceLink");
        ObjectName oname = MBeanUtils.createObjectName(managedBean.getDomain(), resourceLink);
        return oname.toString();
    }

    /**
     * Remove any environment entry with the specified name.
     *
     * @param envName Name of the environment entry to remove
     */
    public void removeEnvironment(String envName) {
        NamingResources nresources = (NamingResources) this.resource;
        if (nresources == null) {
            return;
        }
        ContextEnvironment env = nresources.findEnvironment(envName);
        if (env == null) {
            throw new IllegalArgumentException(
                    "Invalid environment name '" + envName + "'");
        }
        nresources.removeEnvironment(envName);
    }

    /**
     * Remove any resource reference with the specified name.
     *
     * @param resourceName Name of the resource reference to remove
     */
    public void removeResource(String resourceName) {
        // Names arrive quoted from the JMX layer
        resourceName = ObjectName.unquote(resourceName);
        NamingResources nresources = (NamingResources) this.resource;
        if (nresources == null) {
            return;
        }
        ContextResource resource = nresources.findResource(resourceName);
        if (resource == null) {
            throw new IllegalArgumentException(
                    "Invalid resource name '" + resourceName + "'");
        }
        nresources.removeResource(resourceName);
    }

    /**
     * Remove any resource link reference with the specified name.
     *
     * @param resourceLinkName Name of the resource link reference to remove
     */
    public void removeResourceLink(String resourceLinkName) {
        // Names arrive quoted from the JMX layer
        resourceLinkName = ObjectName.unquote(resourceLinkName);
        NamingResources nresources = (NamingResources) this.resource;
        if (nresources == null) {
            return;
        }
        ContextResourceLink resourceLink =
                nresources.findResourceLink(resourceLinkName);
        if (resourceLink == null) {
            throw new IllegalArgumentException(
                    "Invalid resource Link name '" + resourceLinkName + "'");
        }
        nresources.removeResourceLink(resourceLinkName);
    }
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.security.authentication.server;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
import com.sun.security.auth.module.Krb5LoginModule;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.security.KerberosName;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.security.Principal;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
* The {@link KerberosAuthenticationHandler} implements the Kerberos SPNEGO authentication mechanism for HTTP.
* <p/>
* The supported configuration properties are:
* <ul>
* <li>kerberos.principal: the Kerberos principal to be used by the server. As stated by the Kerberos SPNEGO
* specification, it should be <code>HTTP/${HOSTNAME}@{REALM}</code>. The realm can be omitted from the
* principal as the JDK GSS libraries will use the realm name of the configured default realm.
* It does not have a default value.</li>
* <li>kerberos.keytab: the keytab file containing the credentials for the Kerberos principal.
* It does not have a default value.</li>
* <li>kerberos.name.rules: kerberos names rules to resolve principal names, see
* {@link KerberosName#setRules(String)}</li>
* </ul>
*/
public class KerberosAuthenticationHandler implements AuthenticationHandler {
  // Loggers are per-class singletons; declare final to prevent accidental reassignment
  private static final Logger LOG = LoggerFactory.getLogger(KerberosAuthenticationHandler.class);

  /**
   * Kerberos context configuration for the JDK GSS library.
   */
  private static class KerberosConfiguration extends Configuration {
    private final String keytab;
    private final String principal;

    public KerberosConfiguration(String keytab, String principal) {
      this.keytab = keytab;
      this.principal = principal;
    }

    @Override
    public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
      // Configure the JDK Krb5LoginModule: log in non-interactively from the
      // keytab and act as acceptor (isInitiator=false) for SPNEGO.
      Map<String, String> options = new HashMap<String, String>();
      options.put("keyTab", keytab);
      options.put("principal", principal);
      options.put("useKeyTab", "true");
      options.put("storeKey", "true");
      options.put("doNotPrompt", "true");
      options.put("useTicketCache", "true");
      options.put("renewTGT", "true");
      options.put("refreshKrb5Config", "true");
      options.put("isInitiator", "false");
      String ticketCache = System.getenv("KRB5CCNAME");
      if (ticketCache != null) {
        options.put("ticketCache", ticketCache);
      }
      if (LOG.isDebugEnabled()) {
        options.put("debug", "true");
      }
      return new AppConfigurationEntry[]{
          new AppConfigurationEntry(Krb5LoginModule.class.getName(),
                                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                                    options),};
    }
  }

  /**
   * Constant that identifies the authentication mechanism.
   */
  public static final String TYPE = "kerberos";

  /**
   * Constant for the configuration property that indicates the kerberos principal.
   */
  public static final String PRINCIPAL = TYPE + ".principal";

  /**
   * Constant for the configuration property that indicates the keytab file path.
   */
  public static final String KEYTAB = TYPE + ".keytab";

  /**
   * Constant for the configuration property that indicates the Kerberos name
   * rules for the Kerberos principals.
   */
  public static final String NAME_RULES = TYPE + ".name.rules";

  private String principal;
  private String keytab;
  private GSSManager gssManager;
  private LoginContext loginContext;

  /**
   * Initializes the authentication handler instance.
   * <p/>
   * It creates a Kerberos context using the principal and keytab specified in the configuration.
   * <p/>
   * This method is invoked by the {@link AuthenticationFilter#init} method.
   *
   * @param config configuration properties to initialize the handler.
   *
   * @throws ServletException thrown if the handler could not be initialized.
   */
  @Override
  public void init(Properties config) throws ServletException {
    try {
      principal = config.getProperty(PRINCIPAL, principal);
      if (principal == null || principal.trim().length() == 0) {
        throw new ServletException("Principal not defined in configuration");
      }
      keytab = config.getProperty(KEYTAB, keytab);
      if (keytab == null || keytab.trim().length() == 0) {
        throw new ServletException("Keytab not defined in configuration");
      }
      if (!new File(keytab).exists()) {
        throw new ServletException("Keytab does not exist: " + keytab);
      }
      String nameRules = config.getProperty(NAME_RULES, null);
      if (nameRules != null) {
        KerberosName.setRules(nameRules);
      }
      // Log in as the server principal using the keytab-backed JAAS configuration
      Set<Principal> principals = new HashSet<Principal>();
      principals.add(new KerberosPrincipal(principal));
      Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
      KerberosConfiguration kerberosConfiguration = new KerberosConfiguration(keytab, principal);
      loginContext = new LoginContext("", subject, null, kerberosConfiguration);
      loginContext.login();
      Subject serverSubject = loginContext.getSubject();
      // The GSSManager must be obtained within the server subject's security context
      try {
        gssManager = Subject.doAs(serverSubject, new PrivilegedExceptionAction<GSSManager>() {
          @Override
          public GSSManager run() throws Exception {
            return GSSManager.getInstance();
          }
        });
      } catch (PrivilegedActionException ex) {
        throw ex.getException();
      }
      LOG.info("Initialized, principal [{}] from keytab [{}]", principal, keytab);
    } catch (Exception ex) {
      throw new ServletException(ex);
    }
  }

  /**
   * Releases any resources initialized by the authentication handler.
   * <p/>
   * It destroys the Kerberos context.
   */
  @Override
  public void destroy() {
    try {
      if (loginContext != null) {
        loginContext.logout();
        loginContext = null;
      }
    } catch (LoginException ex) {
      // Best-effort cleanup; a failed logout should not propagate
      LOG.warn(ex.getMessage(), ex);
    }
  }

  /**
   * Returns the authentication type of the authentication handler, 'kerberos'.
   * <p/>
   *
   * @return the authentication type of the authentication handler, 'kerberos'.
   */
  @Override
  public String getType() {
    return TYPE;
  }

  /**
   * Returns the Kerberos principal used by the authentication handler.
   *
   * @return the Kerberos principal used by the authentication handler.
   */
  protected String getPrincipal() {
    return principal;
  }

  /**
   * Returns the keytab used by the authentication handler.
   *
   * @return the keytab used by the authentication handler.
   */
  protected String getKeytab() {
    return keytab;
  }

  /**
   * It enforces the Kerberos SPNEGO authentication sequence returning an {@link AuthenticationToken} only
   * after the Kerberos SPNEGO sequence has completed successfully.
   * <p/>
   *
   * @param request the HTTP client request.
   * @param response the HTTP client response.
   *
   * @return an authentication token if the Kerberos SPNEGO sequence is complete and valid,
   *         <code>null</code> if it is in progress (in this case the handler handles the response to the client).
   *
   * @throws IOException thrown if an IO error occurred.
   * @throws AuthenticationException thrown if Kerberos SPNEGO sequence failed.
   */
  @Override
  public AuthenticationToken authenticate(HttpServletRequest request, final HttpServletResponse response)
      throws IOException, AuthenticationException {
    AuthenticationToken token = null;
    String authorization = request.getHeader(KerberosAuthenticator.AUTHORIZATION);
    if (authorization == null || !authorization.startsWith(KerberosAuthenticator.NEGOTIATE)) {
      // No (usable) credentials yet: challenge the client to start SPNEGO
      response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
      response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
      if (authorization == null) {
        LOG.trace("SPNEGO starting");
      } else {
        LOG.warn("'" + KerberosAuthenticator.AUTHORIZATION + "' does not start with '" +
            KerberosAuthenticator.NEGOTIATE + "' : {}", authorization);
      }
    } else {
      // Decode the client token and accept it within the server's security context
      authorization = authorization.substring(KerberosAuthenticator.NEGOTIATE.length()).trim();
      final Base64 base64 = new Base64(0);
      final byte[] clientToken = base64.decode(authorization);
      Subject serverSubject = loginContext.getSubject();
      try {
        token = Subject.doAs(serverSubject, new PrivilegedExceptionAction<AuthenticationToken>() {
          @Override
          public AuthenticationToken run() throws Exception {
            AuthenticationToken token = null;
            GSSContext gssContext = null;
            try {
              gssContext = gssManager.createContext((GSSCredential) null);
              byte[] serverToken = gssContext.acceptSecContext(clientToken, 0, clientToken.length);
              if (serverToken != null && serverToken.length > 0) {
                // Send the (possibly final) server token back to the client
                String authenticate = base64.encodeToString(serverToken);
                response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE,
                                   KerberosAuthenticator.NEGOTIATE + " " + authenticate);
              }
              if (!gssContext.isEstablished()) {
                // Handshake not finished yet; client must send another round
                response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
                LOG.trace("SPNEGO in progress");
              } else {
                // Handshake complete: map the client principal to a short user name
                String clientPrincipal = gssContext.getSrcName().toString();
                KerberosName kerberosName = new KerberosName(clientPrincipal);
                String userName = kerberosName.getShortName();
                token = new AuthenticationToken(userName, clientPrincipal, TYPE);
                response.setStatus(HttpServletResponse.SC_OK);
                LOG.trace("SPNEGO completed for principal [{}]", clientPrincipal);
              }
            } finally {
              if (gssContext != null) {
                gssContext.dispose();
              }
            }
            return token;
          }
        });
      } catch (PrivilegedActionException ex) {
        if (ex.getException() instanceof IOException) {
          throw (IOException) ex.getException();
        }
        else {
          throw new AuthenticationException(ex.getException());
        }
      }
    }
    return token;
  }
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeHighlighting.HighlightDisplayLevel;
import com.intellij.codeHighlighting.Pass;
import com.intellij.codeInsight.daemon.DaemonBundle;
import com.intellij.codeInsight.daemon.HighlightDisplayKey;
import com.intellij.codeInsight.daemon.impl.analysis.DaemonTooltipsUtil;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightingLevelManager;
import com.intellij.codeInsight.daemon.impl.quickfix.QuickFixAction;
import com.intellij.codeInsight.intention.EmptyIntentionAction;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.ex.*;
import com.intellij.codeInspection.ui.InspectionToolPresentation;
import com.intellij.concurrency.JobLauncher;
import com.intellij.diagnostic.PluginException;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.lang.Language;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.lang.annotation.ProblemGroup;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.keymap.Keymap;
import com.intellij.openapi.keymap.KeymapManager;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.profile.codeInspection.ProjectInspectionProfileManager;
import com.intellij.psi.*;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.Interner;
import com.intellij.util.containers.SmartHashSet;
import com.intellij.xml.util.XmlStringUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;
import java.util.function.Predicate;
import static com.intellij.codeInspection.ex.InspectListener.InspectionKind.LOCAL;
import static com.intellij.codeInspection.ex.InspectListener.InspectionKind.LOCAL_PRIORITY;
import static com.intellij.codeInspection.ex.InspectionEventsKt.reportWhenInspectionFinished;
public class LocalInspectionsPass extends ProgressableTextEditorHighlightingPass {
private static final Logger LOG = Logger.getInstance(LocalInspectionsPass.class);
public static final TextRange EMPTY_PRIORITY_RANGE = TextRange.EMPTY_RANGE;
private static final Predicate<PsiFile> SHOULD_INSPECT_FILTER = file -> HighlightingLevelManager.getInstance(file.getProject()).shouldInspect(file);
private final TextRange myPriorityRange;
private final boolean myIgnoreSuppressed;
private final ConcurrentMap<PsiFile, List<InspectionResult>> result = new ConcurrentHashMap<>();
private final InspectListener myInspectTopicPublisher;
private volatile List<HighlightInfo> myInfos = Collections.emptyList();
private final String myShortcutText;
private final SeverityRegistrar mySeverityRegistrar;
private final InspectionProfileWrapper myProfileWrapper;
private final Map<String, Set<PsiElement>> mySuppressedElements = new ConcurrentHashMap<>();
private final boolean myInspectInjectedPsi;
/**
 * Creates a local-inspections highlighting pass over the {@code [startOffset, endOffset)}
 * range of {@code file}.
 *
 * @param priorityRange part of the file to be inspected first (typically the visible area)
 * @param ignoreSuppressed whether suppressed problems are filtered out
 * @param inspectInjectedPsi whether injected fragments inside the file are inspected too
 */
public LocalInspectionsPass(@NotNull PsiFile file,
                            @NotNull Document document,
                            int startOffset,
                            int endOffset,
                            @NotNull TextRange priorityRange,
                            boolean ignoreSuppressed,
                            @NotNull HighlightInfoProcessor highlightInfoProcessor, boolean inspectInjectedPsi) {
  super(file.getProject(), document, getPresentableNameText(), file, null, new TextRange(startOffset, endOffset), true, highlightInfoProcessor);
  assert file.isPhysical() : "can't inspect non-physical file: " + file + "; " + file.getVirtualFile();
  myPriorityRange = priorityRange;
  myIgnoreSuppressed = ignoreSuppressed;
  setId(Pass.LOCAL_INSPECTIONS);
  // Precompute the "show error description" shortcut hint appended to tooltips;
  // KeymapManager may be absent (e.g. headless mode), hence the null check
  final KeymapManager keymapManager = KeymapManager.getInstance();
  if (keymapManager != null) {
    final Keymap keymap = keymapManager.getActiveKeymap();
    myShortcutText = "(" + KeymapUtil.getShortcutsText(keymap.getShortcuts(IdeActions.ACTION_SHOW_ERROR_DESCRIPTION)) + ")";
  }
  else {
    myShortcutText = "";
  }
  // Clients may substitute a customized inspection profile via file user data
  InspectionProfileImpl profileToUse = ProjectInspectionProfileManager.getInstance(myProject).getCurrentProfile();
  Function<InspectionProfileImpl, InspectionProfileWrapper> custom = file.getUserData(InspectionProfileWrapper.CUSTOMIZATION_KEY);
  myProfileWrapper = custom == null ? new InspectionProfileWrapper(profileToUse) : custom.apply(profileToUse);
  assert myProfileWrapper != null;
  mySeverityRegistrar = myProfileWrapper.getProfileManager().getSeverityRegistrar();
  myInspectInjectedPsi = inspectInjectedPsi;
  myInspectTopicPublisher = myProject.getMessageBus().syncPublisher(GlobalInspectionContextEx.INSPECT_TOPIC);
  // initial guess
  setProgressLimit(300 * 2);
}
/** @return the top-level (physical) file this pass runs over. */
private @NotNull PsiFile getFile() {
  return myFile;
}
@Override
protected void collectInformationWithProgress(@NotNull ProgressIndicator progress) {
  // Run all enabled inspections on the fly, unless highlighting is disabled for
  // this file; collected descriptors are always released afterwards.
  try {
    if (HighlightingLevelManager.getInstance(myProject).shouldInspect(getFile())) {
      inspect(getInspectionTools(myProfileWrapper), InspectionManager.getInstance(myProject), true, progress);
    }
  }
  finally {
    disposeDescriptors();
  }
}
/** Releases the per-file inspection results collected during this pass. */
private void disposeDescriptors() {
  result.clear();
}
private static final Set<String> ourToolsWithInformationProblems = new HashSet<>();
/**
 * Runs the given tool wrappers in batch (non-on-the-fly) mode over this pass's file
 * and feeds the found problems into {@code context}'s per-tool presentations.
 * INFORMATION-level descriptors are rejected and the offending tool is logged once.
 */
public void doInspectInBatch(final @NotNull GlobalInspectionContextImpl context,
                             final @NotNull InspectionManager iManager,
                             final @NotNull List<? extends LocalInspectionToolWrapper> toolWrappers) {
  final ProgressIndicator progress = ProgressManager.getInstance().getProgressIndicator();
  inspect(new ArrayList<>(toolWrappers), iManager, false, progress);
  // Problems found in injected fragments are reported separately
  addDescriptorsFromInjectedResults(context);
  List<InspectionResult> resultList = result.get(getFile());
  if (resultList == null) return;
  for (InspectionResult inspectionResult : resultList) {
    LocalInspectionToolWrapper toolWrapper = inspectionResult.tool;
    final String shortName = toolWrapper.getShortName();
    for (ProblemDescriptor descriptor : inspectionResult.foundProblems) {
      if (descriptor.getHighlightType() == ProblemHighlightType.INFORMATION) {
        // Log each offending tool at most once per IDE session
        if (ourToolsWithInformationProblems.add(shortName)) {
          String message = "Tool #" + shortName + " registers INFORMATION level problem in batch mode on " + getFile() + ". " +
                           "INFORMATION level 'warnings' are invisible in the editor and should not become visible in batch mode. " +
                           "Moreover, cause INFORMATION level fixes act more like intention actions, they could e.g. change semantics and " +
                           "thus should not be suggested for batch transformations";
          LocalInspectionEP extension = toolWrapper.getExtension();
          if (extension != null) {
            // Attribute the error to the plugin that registered the tool
            LOG.error(new PluginException(message, extension.getPluginDescriptor().getPluginId()));
          }
          else {
            LOG.error(message);
          }
        }
        continue;
      }
      addDescriptors(toolWrapper, descriptor, context);
    }
  }
}
private void addDescriptors(@NotNull LocalInspectionToolWrapper toolWrapper,
                            @NotNull ProblemDescriptor descriptor,
                            @NotNull GlobalInspectionContextImpl context) {
  // Forward a single problem descriptor to the batch-mode presentation of the tool.
  InspectionToolPresentation presentation = context.getPresentation(toolWrapper);
  List<ProblemDescriptor> singleProblem = Collections.singletonList(descriptor);
  BatchModeDescriptorsUtil.addProblemDescriptors(singleProblem, presentation, myIgnoreSuppressed, context, toolWrapper.getTool());
}
/**
 * Copies problems found in injected (embedded-language) fragments into the global inspection
 * context, skipping suppressed elements and descriptors whose element is gone.
 * The entry keyed by the host file itself is skipped here; it is handled by doInspectInBatch().
 */
private void addDescriptorsFromInjectedResults(@NotNull GlobalInspectionContextImpl context) {
  for (Map.Entry<PsiFile, List<InspectionResult>> entry : result.entrySet()) {
    PsiFile file = entry.getKey();
    if (file == getFile()) continue; // not injected
    List<InspectionResult> resultList = entry.getValue();
    for (InspectionResult inspectionResult : resultList) {
      LocalInspectionToolWrapper toolWrapper = inspectionResult.tool;
      for (ProblemDescriptor descriptor : inspectionResult.foundProblems) {
        PsiElement psiElement = descriptor.getPsiElement();
        if (psiElement == null) continue;
        // honor suppressions inside the injected fragment
        if (toolWrapper.getTool().isSuppressedFor(psiElement)) continue;
        addDescriptors(toolWrapper, descriptor, context);
      }
    }
  }
}
/**
 * Core inspection driver: divides the file's roots into elements inside and outside the
 * priority (visible) range, runs each applicable tool over the priority elements first so
 * visible problems appear sooner, then over the rest, recursing into injected PSI along the
 * way. Finally converts the collected descriptors into highlight infos and, on the fly,
 * highlights redundant suppressions.
 */
private void inspect(@NotNull List<? extends LocalInspectionToolWrapper> toolWrappers,
                     final @NotNull InspectionManager iManager,
                     final boolean isOnTheFly,
                     final @NotNull ProgressIndicator progress) {
  if (toolWrappers.isEmpty()) return;
  List<Divider.DividedElements> allDivided = new ArrayList<>();
  Divider.divideInsideAndOutsideAllRoots(myFile, myRestrictRange, myPriorityRange, SHOULD_INSPECT_FILTER, new CommonProcessors.CollectProcessor<>(allDivided));
  // Elements intersecting the priority range go first; parents are lumped with "outside".
  List<PsiElement> inside = ContainerUtil.concat((List<List<PsiElement>>)ContainerUtil.map(allDivided, d -> d.inside));
  List<PsiElement> outside = ContainerUtil.concat((List<List<PsiElement>>)ContainerUtil.map(allDivided, d -> ContainerUtil.concat(d.outside, d.parents)));
  // Each tool advances the progress twice: once per pass (priority + rest).
  setProgressLimit(toolWrappers.size() * 2L);
  final LocalInspectionToolSession session = new LocalInspectionToolSession(getFile(), myRestrictRange.getStartOffset(), myRestrictRange.getEndOffset());
  List<InspectionContext> init = visitPriorityElementsAndInit(
    InspectionEngine.filterToolsApplicableByLanguage(toolWrappers, InspectionEngine.calcElementDialectIds(inside, outside)),
    iManager, isOnTheFly, progress, inside, session);
  // Injected fragments in the visible range are inspected eagerly; the rest follow below.
  Set<PsiFile> alreadyVisitedInjected = inspectInjectedPsi(inside, isOnTheFly, progress, iManager, true, toolWrappers, Collections.emptySet());
  visitRestElementsAndCleanup(progress, outside, session, init, isOnTheFly);
  inspectInjectedPsi(outside, isOnTheFly, progress, iManager, false, toolWrappers, alreadyVisitedInjected);
  ProgressManager.checkCanceled();
  myInfos = new ArrayList<>();
  addHighlightsFromResults(myInfos);
  if (isOnTheFly) {
    highlightRedundantSuppressions(toolWrappers, iManager, inside, outside);
  }
}
/**
 * Runs the "redundant suppression" inspection over the same elements and adds its results to
 * myInfos, but only when that tool is enabled and the file's language provides a
 * RedundantSuppressionDetector. Bails out when any tool runs for the whole file —
 * presumably because suppression data for such tools would be incomplete here (TODO confirm).
 */
private void highlightRedundantSuppressions(@NotNull List<? extends LocalInspectionToolWrapper> toolWrappers,
                                            @NotNull InspectionManager iManager,
                                            @NotNull List<? extends PsiElement> inside,
                                            @NotNull List<? extends PsiElement> outside) {
  HighlightDisplayKey key = HighlightDisplayKey.find(RedundantSuppressInspection.SHORT_NAME);
  final InspectionProfile inspectionProfile = myProfileWrapper.getInspectionProfile();
  if (key != null && inspectionProfile.isToolEnabled(key, getFile())) {
    InspectionToolWrapper<?,?> toolWrapper = inspectionProfile.getInspectionTool(RedundantSuppressInspection.SHORT_NAME, getFile());
    Language fileLanguage = getFile().getLanguage();
    InspectionSuppressor suppressor = LanguageInspectionSuppressors.INSTANCE.forLanguage(fileLanguage);
    if (suppressor instanceof RedundantSuppressionDetector) {
      if (toolWrappers.stream().anyMatch(LocalInspectionToolWrapper::runForWholeFile)) {
        return;
      }
      // Collect ids (including alternative and merged ids) of tools that actually ran here,
      // so suppressions for tools that did not run are not reported as redundant.
      Set<String> activeTools = new HashSet<>();
      for (LocalInspectionToolWrapper tool : toolWrappers) {
        if (tool.isUnfair() || !tool.isApplicable(fileLanguage) || myProfileWrapper.getInspectionTool(tool.getShortName(), myFile) instanceof GlobalInspectionToolWrapper) {
          continue;
        }
        activeTools.add(tool.getID());
        ContainerUtil.addIfNotNull(activeTools, tool.getAlternativeID());
        InspectionElementsMerger elementsMerger = InspectionElementsMerger.getMerger(tool.getShortName());
        if (elementsMerger != null) {
          activeTools.addAll(Arrays.asList(elementsMerger.getSuppressIds()));
        }
      }
      LocalInspectionTool
        localTool = ((RedundantSuppressInspection)toolWrapper.getTool()).createLocalTool((RedundantSuppressionDetector)suppressor, mySuppressedElements, activeTools);
      ProblemsHolder holder = new ProblemsHolder(iManager, getFile(), true);
      PsiElementVisitor visitor = localTool.buildVisitor(holder, true);
      InspectionEngine.acceptElements(inside, visitor);
      InspectionEngine.acceptElements(outside, visitor);
      HighlightSeverity severity = myProfileWrapper.getErrorLevel(key, getFile()).getSeverity();
      for (ProblemDescriptor descriptor : holder.getResults()) {
        ProgressManager.checkCanceled();
        PsiElement element = descriptor.getPsiElement();
        if (element != null) {
          Document thisDocument = documentManager.getDocument(getFile());
          createHighlightsForDescriptor(myInfos, emptyActionRegistered, ilManager, getFile(), thisDocument,
                                        new LocalInspectionToolWrapper(localTool), severity, descriptor, element, false);
        }
      }
    }
  }
}
/**
 * Runs every tool over the priority (visible-range) elements concurrently. Each tool whose
 * visitor is non-trivial contributes an InspectionContext, collected for the follow-up pass
 * over the remaining elements.
 *
 * @return contexts to be finished by visitRestElementsAndCleanup()
 * @throws ProcessCanceledException when the concurrent run did not complete
 */
private @NotNull List<InspectionContext> visitPriorityElementsAndInit(@NotNull List<? extends LocalInspectionToolWrapper> wrappers,
                                                                      final @NotNull InspectionManager iManager,
                                                                      final boolean isOnTheFly,
                                                                      final @NotNull ProgressIndicator indicator,
                                                                      final @NotNull List<? extends PsiElement> elements,
                                                                      final @NotNull LocalInspectionToolSession session) {
  final List<InspectionContext> init = new ArrayList<>();
  PsiFile file = session.getFile();
  // Tree loading is force-allowed only for the session's file; everything else stays lazy.
  Processor<LocalInspectionToolWrapper> processor = toolWrapper ->
    AstLoadingFilter.disallowTreeLoading(() -> AstLoadingFilter.<Boolean, RuntimeException>forceAllowTreeLoading(file, () -> {
      if (elements.isEmpty() || isOnTheFly) {
        runToolOnElements(toolWrapper, iManager, isOnTheFly, indicator, elements, session, init);
      } else {
        // Batch mode: wrap the run so listeners are notified when the tool finishes.
        reportWhenInspectionFinished(
          myInspectTopicPublisher,
          toolWrapper,
          LOCAL_PRIORITY,
          () -> {
            runToolOnElements(toolWrapper, iManager, false, indicator, elements, session, init);
          });
      }
      return true;
    }));
  if (!JobLauncher.getInstance().invokeConcurrentlyUnderProgress(wrappers, indicator, processor)) {
    throw new ProcessCanceledException();
  }
  return init;
}
/**
 * Runs one tool over the given elements. On the fly, each registered problem is additionally
 * published to the editor immediately via addDescriptorIncrementally(); this incremental
 * publishing is switched off once the (visible-range) elements have been processed.
 * Must be called under a read action.
 */
private void runToolOnElements(final @NotNull LocalInspectionToolWrapper toolWrapper,
                               final @NotNull InspectionManager iManager,
                               final boolean isOnTheFly,
                               final @NotNull ProgressIndicator indicator,
                               final @NotNull List<? extends PsiElement> elements,
                               final @NotNull LocalInspectionToolSession session,
                               @NotNull List<? super InspectionContext> init) {
  ProgressManager.checkCanceled();
  ApplicationManager.getApplication().assertReadAccessAllowed();
  final LocalInspectionTool tool = toolWrapper.getTool();
  // One-element array so the anonymous holder below can observe the later flip to false.
  final boolean[] applyIncrementally = {isOnTheFly};
  ProblemsHolder holder = new ProblemsHolder(iManager, getFile(), isOnTheFly) {
    @Override
    public void registerProblem(@NotNull ProblemDescriptor descriptor) {
      super.registerProblem(descriptor);
      if (applyIncrementally[0]) {
        addDescriptorIncrementally(descriptor, toolWrapper, indicator);
      }
    }
  };
  PsiElementVisitor visitor = InspectionEngine.createVisitorAndAcceptElements(tool, holder, isOnTheFly, session, elements);
  // if inspection returned empty visitor then it should be skipped
  if (visitor != PsiElementVisitor.EMPTY_VISITOR) {
    // init is shared across concurrently running tools — guard the add.
    synchronized (init) {
      init.add(new InspectionContext(toolWrapper, holder, holder.getResultCount(), visitor));
    }
  }
  advanceProgress(1);
  if (holder.hasResults()) {
    appendDescriptors(getFile(), holder.getResults(), toolWrapper);
  }
  applyIncrementally[0] = false; // do not apply incrementally outside visible range
}
/**
 * Second pass: lets each tool's visitor (created in the priority pass) process the remaining
 * elements concurrently, then calls inspectionFinished() and stores only the problems found in
 * this pass — problems from the priority pass were already appended by runToolOnElements(),
 * hence the subList starting at context.problemsSize.
 *
 * @throws ProcessCanceledException when the concurrent run did not complete
 */
private void visitRestElementsAndCleanup(final @NotNull ProgressIndicator indicator,
                                         final @NotNull List<? extends PsiElement> elements,
                                         final @NotNull LocalInspectionToolSession session,
                                         @NotNull List<? extends InspectionContext> init,
                                         final boolean isOnTheFly) {
  Processor<InspectionContext> processor =
    context -> {
      ProgressManager.checkCanceled();
      ApplicationManager.getApplication().assertReadAccessAllowed();
      if (isOnTheFly) {
        AstLoadingFilter.disallowTreeLoading(() -> InspectionEngine.acceptElements(elements, context.visitor));
      } else {
        // Batch mode: notify listeners when the tool's second pass finishes.
        reportWhenInspectionFinished(
          myInspectTopicPublisher,
          context.tool,
          LOCAL,
          () -> {
            AstLoadingFilter.disallowTreeLoading(() -> InspectionEngine.acceptElements(elements, context.visitor));
          });
      }
      advanceProgress(1);
      context.tool.getTool().inspectionFinished(session, context.holder);
      if (context.holder.hasResults()) {
        List<ProblemDescriptor> allProblems = context.holder.getResults();
        // Skip the first problemsSize entries — those were recorded in the priority pass.
        List<ProblemDescriptor> restProblems = allProblems.subList(context.problemsSize, allProblems.size());
        appendDescriptors(getFile(), restProblems, context.tool);
      }
      return true;
    };
  if (!JobLauncher.getInstance().invokeConcurrentlyUnderProgress(init, indicator, processor)) {
    throw new ProcessCanceledException();
  }
}
/**
 * Collects injected PSI files reachable from the given host elements and inspects each one
 * concurrently via doInspectInjectedPsi().
 *
 * @param alreadyVisitedInjected injected files handled by a previous call; excluded here
 * @return the injected files newly inspected by this call (empty when injected-PSI inspection
 *         is disabled for this pass)
 * @throws ProcessCanceledException when the concurrent run did not complete
 */
private @NotNull Set<PsiFile> inspectInjectedPsi(final @NotNull List<? extends PsiElement> elements,
                                                 final boolean onTheFly,
                                                 final @NotNull ProgressIndicator indicator,
                                                 final @NotNull InspectionManager iManager,
                                                 final boolean inVisibleRange,
                                                 final @NotNull List<? extends LocalInspectionToolWrapper> wrappers,
                                                 @NotNull Set<? extends PsiFile> alreadyVisitedInjected) {
  if (!myInspectInjectedPsi) return Collections.emptySet();
  Set<PsiFile> injected = new HashSet<>();
  for (PsiElement element : elements) {
    PsiFile containingFile = getFile();
    InjectedLanguageManager.getInstance(containingFile.getProject()).enumerateEx(element, containingFile, false,
                                                                                (injectedPsi, places) -> injected.add(injectedPsi));
  }
  injected.removeAll(alreadyVisitedInjected);
  if (!injected.isEmpty()) {
    Processor<PsiFile> processor = injectedPsi -> {
      doInspectInjectedPsi(injectedPsi, onTheFly, indicator, iManager, inVisibleRange, wrappers);
      return true;
    };
    if (!JobLauncher.getInstance().invokeConcurrentlyUnderProgress(new ArrayList<>(injected), indicator, processor)) {
      throw new ProcessCanceledException();
    }
  }
  return injected;
}
// Sentinel attributes that always report themselves as non-empty. Used in
// highlightInfoFromDescriptor() to keep INFORMATION-level infos that carry quick fixes from
// being filtered out for having empty attributes.
private static final TextAttributes NONEMPTY_TEXT_ATTRIBUTES = new TextAttributes() {
  @Override
  public boolean isEmpty() {
    return false;
  }
};
/**
 * Converts a problem descriptor into a HighlightInfo.
 *
 * @return the built info, or null when the descriptor has no text range
 */
private @Nullable HighlightInfo highlightInfoFromDescriptor(@NotNull ProblemDescriptor problemDescriptor,
                                                            @NotNull HighlightInfoType highlightInfoType,
                                                            @NotNull @NlsContexts.DetailedDescription String message,
                                                            @Nullable @NlsContexts.Tooltip String toolTip,
                                                            @NotNull PsiElement psiElement,
                                                            @NotNull List<IntentionAction> quickFixes,
                                                            @NotNull String toolID) {
  TextRange textRange = ((ProblemDescriptorBase)problemDescriptor).getTextRange();
  if (textRange == null) return null;
  // A descriptor spanning a whole PsiFile becomes a file-level annotation.
  boolean isFileLevel = psiElement instanceof PsiFile && textRange.equals(psiElement.getTextRange());
  final HighlightSeverity severity = highlightInfoType.getSeverity(psiElement);
  TextAttributesKey attributesKey = ((ProblemDescriptorBase)problemDescriptor).getEnforcedTextAttributes();
  // Enforced attributes from the descriptor win over severity-derived ones, scheme permitting.
  TextAttributes attributes = attributesKey == null || getColorsScheme() == null
                              ? mySeverityRegistrar.getTextAttributesBySeverity(severity)
                              : getColorsScheme().getAttributes(attributesKey);
  HighlightInfo.Builder b = HighlightInfo.newHighlightInfo(highlightInfoType)
    .range(psiElement, textRange.getStartOffset(), textRange.getEndOffset())
    .description(message)
    .severity(severity)
    .inspectionToolId(toolID);
  if (toolTip != null) b.escapedToolTip(toolTip);
  if (HighlightSeverity.INFORMATION.equals(severity) && attributes == null && toolTip == null && !quickFixes.isEmpty()) {
    // Hack to avoid filtering this info out in HighlightInfoFilterImpl even though its attributes are empty.
    // But it has quick fixes so it needs to be created.
    attributes = NONEMPTY_TEXT_ATTRIBUTES;
  }
  if (attributes != null) b.textAttributes(attributes);
  if (problemDescriptor.isAfterEndOfLine()) b.endOfLine();
  if (isFileLevel) b.fileLevelAnnotation();
  if (problemDescriptor.getProblemGroup() != null) b.problemGroup(problemDescriptor.getProblemGroup());
  return b.create();
}
// State used by addDescriptorIncrementally() to publish highlights while the user types.
private final Map<TextRange, RangeMarker> ranges2markersCache = new HashMap<>(); // accessed in EDT only
private final InjectedLanguageManager ilManager = InjectedLanguageManager.getInstance(myProject);
// Scratch list reused across invokeLater callbacks.
private final List<HighlightInfo> infos = new ArrayList<>(2); // accessed in EDT only
private final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject);
// Deduplicates "empty" intention actions per (range, tool); synchronized — written from multiple threads.
private final Set<Pair<TextRange, String>> emptyActionRegistered = Collections.synchronizedSet(new HashSet<>());
/**
 * Publishes a single freshly-found problem to the editor without waiting for the whole pass to
 * finish. Suppression is honored first; the actual highlighting is scheduled on the EDT and
 * expires if the project is disposed or the indicator is cancelled.
 */
private void addDescriptorIncrementally(final @NotNull ProblemDescriptor descriptor,
                                        final @NotNull LocalInspectionToolWrapper tool,
                                        final @NotNull ProgressIndicator indicator) {
  if (myIgnoreSuppressed) {
    LocalInspectionToolWrapper toolWrapper = tool;
    PsiElement psiElement = descriptor.getPsiElement();
    if (descriptor instanceof ProblemDescriptorWithReporterName) {
      // The problem may be reported on behalf of a different tool; resolve it for suppression checks.
      String reportingToolName = ((ProblemDescriptorWithReporterName)descriptor).getReportingToolName();
      toolWrapper = (LocalInspectionToolWrapper)myProfileWrapper.getInspectionTool(reportingToolName, psiElement);
    }
    if (toolWrapper.getTool().isSuppressedFor(psiElement)) {
      registerSuppressedElements(psiElement, toolWrapper.getID(), toolWrapper.getAlternativeID());
      return;
    }
  }
  ApplicationManager.getApplication().invokeLater(()->{
    PsiElement psiElement = descriptor.getPsiElement();
    if (psiElement == null) return;
    PsiFile file = psiElement.getContainingFile();
    Document thisDocument = documentManager.getDocument(file);
    HighlightSeverity severity = myProfileWrapper.getErrorLevel(tool.getDisplayKey(), file).getSeverity();
    // "infos" is a reused EDT-only scratch list — clear before collecting this descriptor's infos.
    infos.clear();
    createHighlightsForDescriptor(infos, emptyActionRegistered, ilManager, file, thisDocument, tool, severity, descriptor, psiElement);
    for (HighlightInfo info : infos) {
      final EditorColorsScheme colorsScheme = getColorsScheme();
      UpdateHighlightersUtil.addHighlighterToEditorIncrementally(myProject, myDocument, getFile(),
                                                                 myRestrictRange.getStartOffset(),
                                                                 myRestrictRange.getEndOffset(),
                                                                 info, colorsScheme, getId(),
                                                                 ranges2markersCache);
    }
  }, __->myProject.isDisposed() || indicator.isCanceled());
}
/**
 * Validates and stores the descriptors produced by the given tool for the file.
 * A null descriptor indicates a misbehaving inspection: it is logged with full context, but
 * the batch is still recorded so the remaining descriptors are not lost.
 */
private void appendDescriptors(@NotNull PsiFile file, @NotNull List<? extends ProblemDescriptor> descriptors, @NotNull LocalInspectionToolWrapper tool) {
  for (ProblemDescriptor each : descriptors) {
    if (each != null) continue;
    LOG.error("null descriptor. all descriptors(" + descriptors.size() +"): " +
              descriptors + "; file: " + file + " (" + file.getVirtualFile() +"); tool: " + tool);
  }
  appendResult(file, new InspectionResult(tool, descriptors));
}
/**
 * Thread-safely appends an inspection result to the per-file result list.
 * The list for a file is created atomically on first use; additions to the list itself are
 * guarded by synchronizing on the list, matching the readers in addHighlightsFromResults().
 */
private void appendResult(@NotNull PsiFile file, @NotNull InspectionResult result) {
  // this.result is a ConcurrentMap (required by the ConcurrencyUtil.cacheOrGet contract this
  // replaces), so computeIfAbsent is the standard atomic get-or-create.
  List<InspectionResult> resultList = this.result.computeIfAbsent(file, __ -> new ArrayList<>());
  synchronized (resultList) {
    resultList.add(result);
  }
}
/** Pushes all collected highlight infos to the editor in one batch for the restricted range. */
@Override
protected void applyInformationWithProgress() {
  final int rangeStart = myRestrictRange.getStartOffset();
  final int rangeEnd = myRestrictRange.getEndOffset();
  UpdateHighlightersUtil.setHighlightersToEditor(myProject, myDocument, rangeStart, rangeEnd, myInfos, getColorsScheme(), getId());
}
/**
 * Converts every stored problem descriptor (host file and injected files alike) into
 * HighlightInfos and appends them to outInfos. Each per-file result list is iterated under its
 * own lock, mirroring the writers in appendResult().
 */
private void addHighlightsFromResults(@NotNull List<? super HighlightInfo> outInfos) {
  PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myProject);
  InjectedLanguageManager ilManager = InjectedLanguageManager.getInstance(myProject);
  Set<Pair<TextRange, String>> emptyActionRegistered = new HashSet<>();
  for (Map.Entry<PsiFile, List<InspectionResult>> entry : result.entrySet()) {
    ProgressManager.checkCanceled();
    PsiFile file = entry.getKey();
    Document documentRange = documentManager.getDocument(file);
    if (documentRange == null) continue;
    List<InspectionResult> resultList = entry.getValue();
    synchronized (resultList) {
      for (InspectionResult inspectionResult : resultList) {
        ProgressManager.checkCanceled();
        LocalInspectionToolWrapper tool = inspectionResult.tool;
        HighlightSeverity severity = myProfileWrapper.getErrorLevel(tool.getDisplayKey(), file).getSeverity();
        for (ProblemDescriptor descriptor : inspectionResult.foundProblems) {
          ProgressManager.checkCanceled();
          PsiElement element = descriptor.getPsiElement();
          if (element != null) {
            createHighlightsForDescriptor(outInfos, emptyActionRegistered, ilManager, file, documentRange, tool, severity, descriptor, element,
                                          myIgnoreSuppressed);
          }
        }
      }
    }
  }
}
/**
 * Suppression-aware wrapper around the main createHighlightsForDescriptor(): first re-resolves
 * the reporting tool (a descriptor may be reported on behalf of another inspection via
 * ProblemDescriptorWithReporterName), then either records the suppression or delegates.
 */
private void createHighlightsForDescriptor(@NotNull List<? super HighlightInfo> outInfos,
                                           @NotNull Set<? super Pair<TextRange, String>> emptyActionRegistered,
                                           @NotNull InjectedLanguageManager ilManager,
                                           @NotNull PsiFile file,
                                           @NotNull Document documentRange,
                                           @NotNull LocalInspectionToolWrapper toolWrapper,
                                           @NotNull HighlightSeverity severity,
                                           @NotNull ProblemDescriptor descriptor,
                                           @NotNull PsiElement element,
                                           boolean ignoreSuppressed) {
  if (descriptor instanceof ProblemDescriptorWithReporterName) {
    String reportingToolName = ((ProblemDescriptorWithReporterName)descriptor).getReportingToolName();
    final InspectionToolWrapper<?, ?> reportingTool = myProfileWrapper.getInspectionTool(reportingToolName, element);
    LOG.assertTrue(reportingTool instanceof LocalInspectionToolWrapper, reportingToolName);
    toolWrapper = (LocalInspectionToolWrapper)reportingTool;
    // Severity must come from the reporting tool's profile entry, not the original tool's.
    severity = myProfileWrapper.getErrorLevel(HighlightDisplayKey.find(reportingToolName), file).getSeverity();
  }
  LocalInspectionTool tool = toolWrapper.getTool();
  if (ignoreSuppressed && tool.isSuppressedFor(element)) {
    registerSuppressedElements(element, toolWrapper.getID(), toolWrapper.getAlternativeID());
    return;
  }
  createHighlightsForDescriptor(outInfos, emptyActionRegistered, ilManager, file, documentRange, toolWrapper, severity, descriptor, element);
}
/**
 * Builds the HighlightInfo (message, tooltip, quick fixes) for one descriptor and adds it to
 * outInfos — either directly, or remapped to host-file coordinates when the descriptor belongs
 * to an injected fragment. Logs a plugin error when the reported element does not belong to
 * the file the inspection was invoked for.
 */
private void createHighlightsForDescriptor(@NotNull List<? super HighlightInfo> outInfos,
                                           @NotNull Set<? super Pair<TextRange, String>> emptyActionRegistered,
                                           @NotNull InjectedLanguageManager ilManager,
                                           @NotNull PsiFile file,
                                           @NotNull Document documentRange,
                                           @NotNull LocalInspectionToolWrapper toolWrapper,
                                           @NotNull HighlightSeverity severity,
                                           @NotNull ProblemDescriptor descriptor,
                                           @NotNull PsiElement element) {
  HighlightInfoType level = ProblemDescriptorUtil.highlightTypeFromDescriptor(descriptor, severity, mySeverityRegistrar);
  @NlsSafe String message = ProblemDescriptorUtil.renderDescriptionMessage(descriptor, element);
  // A problem group (if present) overrides the tool's short name for key lookup.
  ProblemGroup problemGroup = descriptor.getProblemGroup();
  String problemName = problemGroup != null ? problemGroup.getProblemName() : null;
  String shortName = problemName != null ? problemName : toolWrapper.getShortName();
  final HighlightDisplayKey key = HighlightDisplayKey.find(shortName);
  final InspectionProfile inspectionProfile = myProfileWrapper.getInspectionProfile();
  if (!inspectionProfile.isToolEnabled(key, getFile())) return;
  HighlightInfoType type = new InspectionHighlightInfoType(level, element);
  // Strip HTML markup and entities from the message for the plain-text description.
  final String plainMessage = message.startsWith("<html>")
                              ? StringUtil.unescapeXmlEntities(XmlStringUtil.stripHtml(message).replaceAll("<[^>]*>", ""))
                                .replaceAll("&nbsp;", " ")
                              : message;
  @NlsSafe String tooltip = null;
  if (descriptor.showTooltip()) {
    // Tooltips repeat heavily across infos; intern to save memory.
    tooltip = tooltips.intern(DaemonTooltipsUtil.getWrappedTooltip(message, shortName, myShortcutText, showToolDescription(toolWrapper)));
  }
  List<IntentionAction> fixes = getQuickFixes(key, descriptor, emptyActionRegistered);
  HighlightInfo info = highlightInfoFromDescriptor(descriptor, type, plainMessage, tooltip, element, fixes, key.getID());
  if (info == null) return;
  registerQuickFixes(info, fixes, shortName);
  PsiFile context = getTopLevelFileInBaseLanguage(element);
  PsiFile myContext = getTopLevelFileInBaseLanguage(getFile());
  if (context != getFile()) {
    // The tool reported an element from a different file — a bug in that inspection.
    String errorMessage = "Reported element " + element +
                          " is not from the file '" + file.getVirtualFile().getPath() +
                          "' the inspection '" + shortName +
                          "' (" + toolWrapper.getTool().getClass() +
                          ") was invoked for. Message: '" + descriptor + "'.\nElement containing file: " +
                          context + "\nInspection invoked for file: " + myContext + "\n";
    PluginException.logPluginError(LOG, errorMessage, null, toolWrapper.getTool().getClass());
  }
  boolean isOutsideInjected = !myInspectInjectedPsi || file == getFile();
  if (isOutsideInjected) {
    outInfos.add(info);
    return;
  }
  // Injected-fragment coordinates must be translated back into the host document.
  injectToHost(outInfos, ilManager, file, documentRange, element, fixes, info, shortName);
}
/**
 * Remembers that the given element was suppressed for the tool id (and for its alternative id,
 * when one exists), for later redundant-suppression analysis.
 */
private void registerSuppressedElements(@NotNull PsiElement element, String id, String alternativeID) {
  final List<String> toolIds = alternativeID == null ? Collections.singletonList(id) : Arrays.asList(id, alternativeID);
  for (String toolId : toolIds) {
    mySuppressedElements.computeIfAbsent(toolId, __ -> new HashSet<>()).add(element);
  }
}
/**
 * Translates a HighlightInfo computed inside an injected fragment into one or more infos in
 * host-document coordinates (one per editable fragment the info's range intersects), copying
 * over description, tooltip and quick fixes.
 */
private static void injectToHost(@NotNull List<? super HighlightInfo> outInfos,
                                 @NotNull InjectedLanguageManager ilManager,
                                 @NotNull PsiFile file,
                                 @NotNull Document documentRange,
                                 @NotNull PsiElement element,
                                 @NotNull List<? extends IntentionAction> fixes,
                                 @NotNull HighlightInfo info,
                                 String shortName) {
  // todo we got to separate our "internal" prefixes/suffixes from user-defined ones
  // todo in the latter case the errors should be highlighted, otherwise not
  List<TextRange> editables = ilManager.intersectWithAllEditableFragments(file, new TextRange(info.startOffset, info.endOffset));
  for (TextRange editable : editables) {
    TextRange hostRange = ((DocumentWindow)documentRange).injectedToHost(editable);
    int start = hostRange.getStartOffset();
    int end = hostRange.getEndOffset();
    HighlightInfo.Builder builder = HighlightInfo.newHighlightInfo(info.type).range(element, start, end);
    String description = info.getDescription();
    if (description != null) {
      builder.description(description);
    }
    String toolTip = info.getToolTip();
    if (toolTip != null) {
      builder.escapedToolTip(toolTip);
    }
    HighlightInfo patched = builder.createUnconditionally();
    // Keep the patched info unless the mapping collapsed a non-empty range to an empty one.
    if (patched.startOffset != patched.endOffset || info.startOffset == info.endOffset) {
      patched.setFromInjection(true);
      registerQuickFixes(patched, fixes, shortName);
      outInfos.add(patched);
    }
  }
}
/** Returns the base-language PSI root of the top-level (host) file containing the element. */
private PsiFile getTopLevelFileInBaseLanguage(@NotNull PsiElement element) {
  final PsiFile topLevelFile = InjectedLanguageManager.getInstance(myProject).getTopLevelFile(element);
  final FileViewProvider provider = topLevelFile.getViewProvider();
  final Language baseLanguage = provider.getBaseLanguage();
  return provider.getPsi(baseLanguage);
}
// Tooltip texts repeat across many highlight infos; weak-intern them to reduce memory usage.
private static final Interner<String> tooltips = Interner.createWeakInterner();
/**
 * Whether the tooltip should offer the tool's description: true when the tool has no static
 * description at all (it may be computed elsewhere) or a non-empty one; false only for an
 * explicitly empty static description.
 */
private static boolean showToolDescription(@NotNull LocalInspectionToolWrapper tool) {
  final String description = tool.getStaticDescription();
  if (description == null) {
    return true;
  }
  return !description.isEmpty();
}
/** Attaches each quick fix to the highlight info under the key derived from the tool's short name. */
private static void registerQuickFixes(@NotNull HighlightInfo highlightInfo,
                                       @NotNull List<? extends IntentionAction> quickFixes,
                                       String shortName) {
  final HighlightDisplayKey key = HighlightDisplayKey.find(shortName);
  quickFixes.forEach(fix -> QuickFixAction.registerQuickFixAction(highlightInfo, fix, key));
}
/**
 * Collects the intention actions for a descriptor: its declared quick fixes, an optional hint
 * action, and — when nothing else is available and no enforced attributes are set — a single
 * "empty" intention (registered at most once per (range, key) via emptyActionRegistered) so
 * the user can still see/disable the inspection from the popup.
 *
 * @throws IllegalStateException when a tool put a null entry into its fixes array
 */
private static @NotNull List<IntentionAction> getQuickFixes(@NotNull HighlightDisplayKey key,
                                                            @NotNull ProblemDescriptor descriptor,
                                                            @NotNull Set<? super Pair<TextRange, String>> emptyActionRegistered) {
  List<IntentionAction> result = new SmartList<>();
  boolean needEmptyAction = true;
  QuickFix[] fixes = descriptor.getFixes();
  if (fixes != null && fixes.length != 0) {
    for (int k = 0; k < fixes.length; k++) {
      QuickFix fix = fixes[k];
      if (fix == null) throw new IllegalStateException("Inspection " + key + " returns null quick fix in its descriptor: " + descriptor + "; array: " +
                                                       Arrays.toString(fixes));
      // Wrap by index so the fix can be re-resolved from the descriptor later.
      result.add(QuickFixWrapper.wrap(descriptor, k));
      needEmptyAction = false;
    }
  }
  HintAction hintAction = descriptor instanceof ProblemDescriptorImpl ? ((ProblemDescriptorImpl)descriptor).getHintAction() : null;
  if (hintAction != null) {
    result.add(hintAction);
    needEmptyAction = false;
  }
  if (((ProblemDescriptorBase)descriptor).getEnforcedTextAttributes() != null) {
    needEmptyAction = false;
  }
  if (needEmptyAction && emptyActionRegistered.add(Pair.create(((ProblemDescriptorBase)descriptor).getTextRange(), key.toString()))) {
    String displayNameByKey = HighlightDisplayKey.getDisplayNameByKey(key);
    LOG.assertTrue(displayNameByKey != null, key.toString());
    IntentionAction emptyIntentionAction = new EmptyIntentionAction(displayNameByKey);
    result.add(emptyIntentionAction);
  }
  return result;
}
/**
 * Collects, for every language root of the file's view provider that should be inspected, the
 * root itself plus all of its descendant elements into outElements, and the id of every
 * encountered language and its dialects into outDialects.
 */
private static void getElementsAndDialectsFrom(@NotNull PsiFile file,
                                               @NotNull List<? super PsiElement> outElements,
                                               @NotNull Set<? super String> outDialects) {
  final FileViewProvider viewProvider = file.getViewProvider();
  Set<Language> processedLanguages = new SmartHashSet<>();
  // Depth-first walk that records every child before descending into it.
  final PsiElementVisitor visitor = new PsiRecursiveElementVisitor() {
    @Override
    public void visitElement(@NotNull PsiElement element) {
      ProgressManager.checkCanceled();
      PsiElement child = element.getFirstChild();
      while (child != null) {
        outElements.add(child);
        child.accept(this);
        appendDialects(child, processedLanguages, outDialects);
        child = child.getNextSibling();
      }
    }
  };
  for (Language language : viewProvider.getLanguages()) {
    final PsiFile psiRoot = viewProvider.getPsi(language);
    if (psiRoot == null || !HighlightingLevelManager.getInstance(file.getProject()).shouldInspect(psiRoot)) {
      continue;
    }
    outElements.add(psiRoot);
    psiRoot.accept(visitor);
    appendDialects(psiRoot, processedLanguages, outDialects);
  }
}
/**
 * Records the element's language id, and — the first time each language is seen — the ids of
 * all its dialects.
 */
private static void appendDialects(@NotNull PsiElement element,
                                   @NotNull Set<? super Language> outProcessedLanguages,
                                   @NotNull Set<? super String> outDialectIds) {
  final Language language = element.getLanguage();
  outDialectIds.add(language.getID());
  if (!outProcessedLanguages.add(language)) {
    return; // dialects for this language were already recorded
  }
  for (Language dialect : language.getDialects()) {
    outDialectIds.add(dialect.getID());
  }
}
/**
 * Computes the list of local inspection tools to run on this file for the given profile:
 * enabled, above DO_NOT_SHOW level, acceptable to this pass, with a known language (if any
 * declared), and not suppressed for the whole file. Global tools contribute their shared local
 * counterpart when they have one.
 */
@NotNull
List<LocalInspectionToolWrapper> getInspectionTools(@NotNull InspectionProfileWrapper profile) {
  List<InspectionToolWrapper<?, ?>> toolWrappers = profile.getInspectionProfile().getInspectionTools(getFile());
  InspectionProfileWrapper.checkInspectionsDuplicates(toolWrappers);
  List<LocalInspectionToolWrapper> enabled = new ArrayList<>();
  for (InspectionToolWrapper<?, ?> toolWrapper : toolWrappers) {
    ProgressManager.checkCanceled();
    if (toolWrapper instanceof LocalInspectionToolWrapper && !isAcceptableLocalTool((LocalInspectionToolWrapper)toolWrapper)) {
      continue;
    }
    final HighlightDisplayKey key = toolWrapper.getDisplayKey();
    if (!profile.isToolEnabled(key, getFile())) continue;
    if (HighlightDisplayLevel.DO_NOT_SHOW.equals(profile.getErrorLevel(key, getFile()))) continue;
    LocalInspectionToolWrapper wrapper;
    if (toolWrapper instanceof LocalInspectionToolWrapper) {
      wrapper = (LocalInspectionToolWrapper)toolWrapper;
    }
    else {
      // A global tool may expose a local part that can run in this pass.
      wrapper = ((GlobalInspectionToolWrapper)toolWrapper).getSharedLocalInspectionToolWrapper();
      if (wrapper == null || !isAcceptableLocalTool(wrapper)) continue;
    }
    String language = wrapper.getLanguage();
    if (language != null && Language.findLanguageByID(language) == null) {
      continue; // filter out at least unknown languages
    }
    if (myIgnoreSuppressed && wrapper.getTool().isSuppressedFor(getFile())) {
      continue;
    }
    enabled.add(wrapper);
  }
  return enabled;
}
/** Extension hook: subclasses may veto individual local tools; by default every tool is accepted. */
protected boolean isAcceptableLocalTool(@NotNull LocalInspectionToolWrapper wrapper) {
  return true;
}
/**
 * Runs every applicable tool over one injected PSI file. Problems suppressed on the injection
 * host are recorded instead of reported; on-the-fly problems found in the visible range are
 * additionally published to the editor incrementally.
 */
private void doInspectInjectedPsi(@NotNull PsiFile injectedPsi,
                                  final boolean isOnTheFly,
                                  final @NotNull ProgressIndicator indicator,
                                  @NotNull InspectionManager iManager,
                                  final boolean inVisibleRange,
                                  @NotNull List<? extends LocalInspectionToolWrapper> wrappers) {
  final PsiElement host = InjectedLanguageManager.getInstance(injectedPsi.getProject()).getInjectionHost(injectedPsi);
  List<PsiElement> elements = new ArrayList<>();
  Set<String> elementDialectIds = new SmartHashSet<>();
  getElementsAndDialectsFrom(injectedPsi, elements, elementDialectIds);
  if (elements.isEmpty()) {
    return;
  }
  List<LocalInspectionToolWrapper> applicableTools = InspectionEngine.filterToolsApplicableByLanguage(wrappers, elementDialectIds);
  for (LocalInspectionToolWrapper wrapper : applicableTools) {
    ProgressManager.checkCanceled();
    final LocalInspectionTool tool = wrapper.getTool();
    ProblemsHolder holder = new ProblemsHolder(iManager, injectedPsi, isOnTheFly) {
      @Override
      public void registerProblem(@NotNull ProblemDescriptor descriptor) {
        // Suppression on the host element silences problems in the whole injected fragment.
        if (host != null && myIgnoreSuppressed && tool.isSuppressedFor(host)) {
          registerSuppressedElements(host, wrapper.getID(), wrapper.getAlternativeID());
          return;
        }
        super.registerProblem(descriptor);
        if (isOnTheFly && inVisibleRange) {
          addDescriptorIncrementally(descriptor, wrapper, indicator);
        }
      }
    };
    // The session spans the entire injected file, not just the host's visible range.
    LocalInspectionToolSession injSession = new LocalInspectionToolSession(injectedPsi, 0, injectedPsi.getTextLength());
    InspectionEngine.createVisitorAndAcceptElements(tool, holder, isOnTheFly, injSession, elements);
    tool.inspectionFinished(injSession, holder);
    List<ProblemDescriptor> problems = holder.getResults();
    if (!problems.isEmpty()) {
      appendDescriptors(injectedPsi, problems, wrapper);
    }
  }
}
/** Returns the highlight infos collected by the last inspect() run. */
@Override
public @NotNull List<HighlightInfo> getInfos() {
  return myInfos;
}
/** Immutable pairing of a tool and the problems it found; the problem list is defensively copied. */
private static final class InspectionResult {
  private final @NotNull LocalInspectionToolWrapper tool;
  private final @NotNull List<? extends ProblemDescriptor> foundProblems;
  private InspectionResult(@NotNull LocalInspectionToolWrapper tool, @NotNull List<? extends ProblemDescriptor> foundProblems) {
    this.tool = tool;
    this.foundProblems = new ArrayList<>(foundProblems);
  }
}
/**
 * Per-tool state carried from the priority pass (visitPriorityElementsAndInit) to the second
 * pass (visitRestElementsAndCleanup): the tool, its holder, its visitor, and how many problems
 * it had already found when the priority pass ended.
 */
private static final class InspectionContext {
  private InspectionContext(@NotNull LocalInspectionToolWrapper tool,
                            @NotNull ProblemsHolder holder,
                            int problemsSize, // need this to diff between found problems in visible part and the rest
                            @NotNull PsiElementVisitor visitor) {
    this.tool = tool;
    this.holder = holder;
    this.problemsSize = problemsSize;
    this.visitor = visitor;
  }
  private final @NotNull LocalInspectionToolWrapper tool;
  private final @NotNull ProblemsHolder holder;
  private final int problemsSize;
  private final @NotNull PsiElementVisitor visitor;
}
/** HighlightInfoType whose severity is resolved against a concrete PSI element at construction time. */
public static class InspectionHighlightInfoType extends HighlightInfoType.HighlightInfoTypeImpl {
  InspectionHighlightInfoType(@NotNull HighlightInfoType level, @NotNull PsiElement element) {
    super(level.getSeverity(element), level.getAttributesKey());
  }
}
/** Returns the user-visible name of this highlighting pass. */
private static @Nls String getPresentableNameText() {
  return DaemonBundle.message("pass.inspection");
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.