gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
*/
package org.lwjgl.opengl;
import org.lwjgl.system.*;
import org.lwjgl.system.macosx.MacOSXLibrary;
import org.lwjgl.system.windows.*;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.*;
import static java.lang.Math.*;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL30.*;
import static org.lwjgl.opengl.GL32.*;
import static org.lwjgl.opengl.GLX.*;
import static org.lwjgl.opengl.GLX11.*;
import static org.lwjgl.opengl.WGL.*;
import static org.lwjgl.system.APIUtil.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.JNI.*;
import static org.lwjgl.system.MemoryStack.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.ThreadLocalUtil.*;
import static org.lwjgl.system.linux.X11.*;
import static org.lwjgl.system.windows.GDI32.*;
import static org.lwjgl.system.windows.User32.*;
import static org.lwjgl.system.windows.WindowsUtil.*;
/**
* This class must be used before any OpenGL function is called. It has the following responsibilities:
* <ul>
* <li>Loads the OpenGL native library into the JVM process.</li>
* <li>Creates instances of {@link GLCapabilities} classes. A {@code GLCapabilities} instance contains flags for functionality that is available in an OpenGL
* context. Internally, it also contains function pointers that are only valid in that specific OpenGL context.</li>
* <li>Maintains thread-local state for {@code GLCapabilities} instances, corresponding to OpenGL contexts that are current in those threads.</li>
* </ul>
*
* <h3>Library lifecycle</h3>
* <p>The OpenGL library is loaded automatically when this class is initialized. Set the {@link Configuration#OPENGL_EXPLICIT_INIT} option to override this
* behavior. Manual loading/unloading can be achieved with the {@link #create} and {@link #destroy} functions. The name of the library loaded can be overridden
* with the {@link Configuration#OPENGL_LIBRARY_NAME} option. The maximum OpenGL version loaded can be set with the {@link Configuration#OPENGL_MAXVERSION}
* option. This can be useful to ensure that no functionality above a specific version is used during development.</p>
*
* <h3>GLCapabilities creation</h3>
* <p>Instances of {@code GLCapabilities} can be created with the {@link #createCapabilities} method. An OpenGL context must be current in the current thread
* before it is called. Calling this method is expensive, so the {@code GLCapabilities} instance should be associated with the OpenGL context and reused as
* necessary.</p>
*
* <h3>Thread-local state</h3>
* <p>Before a function for a given OpenGL context can be called, the corresponding {@code GLCapabilities} instance must be passed to the
* {@link #setCapabilities} method. The user is also responsible for clearing the current {@code GLCapabilities} instance when the context is destroyed or made
* current in another thread.</p>
*
* <p>Note that the {@link #createCapabilities} method implicitly calls {@link #setCapabilities} with the newly created instance.</p>
*/
public final class GL {
private static final APIVersion MAX_VERSION;
private static FunctionProvider functionProvider;
/** See {@link Configuration#OPENGL_CAPABILITIES_STATE}. */
private static final CapabilitiesState capabilitiesState;
private static WGLCapabilities capabilitiesWGL;
private static GLXCapabilities capabilitiesGLXClient;
private static GLXCapabilities capabilitiesGLX;
static {
    // Resolve the maximum OpenGL version to expose (null means no cap).
    MAX_VERSION = apiParseVersion(Configuration.OPENGL_MAXVERSION);

    // Select the capabilities-state strategy; "ThreadLocal" is the safe default,
    // "static" is a write-once optimization for single-context applications.
    String capsStateType = Configuration.OPENGL_CAPABILITIES_STATE.get("ThreadLocal");
    if ( "static".equals(capsStateType) )
        capabilitiesState = new StaticCapabilitiesState();
    else if ( "ThreadLocal".equals(capsStateType) )
        capabilitiesState = new TLCapabilitiesState();
    else
        throw new IllegalStateException("Invalid " + Configuration.OPENGL_CAPABILITIES_STATE.getProperty() + " specified.");

    // Load the native library eagerly unless explicit initialization was requested.
    if ( !Configuration.OPENGL_EXPLICIT_INIT.get(false) )
        create();
}
private GL() {}
/** Loads the OpenGL native library, using the default library name for the current platform. */
public static void create() {
    SharedLibrary GL;
    switch ( Platform.get() ) {
        case LINUX:
            // Prefer the versioned SONAME; fall back to the unversioned dev symlink.
            GL = Library.loadNative(Configuration.OPENGL_LIBRARY_NAME, "libGL.so.1", "libGL.so");
            break;
        case MACOSX:
            // Use the system OpenGL framework unless an explicit library name was configured.
            GL = Configuration.OPENGL_LIBRARY_NAME.get() != null
                ? Library.loadNative(Configuration.OPENGL_LIBRARY_NAME)
                : MacOSXLibrary.getWithIdentifier("com.apple.opengl");
            break;
        case WINDOWS:
            GL = Library.loadNative(Configuration.OPENGL_LIBRARY_NAME, "opengl32");
            break;
        default:
            throw new IllegalStateException();
    }

    create(GL);
}
/**
 * Loads the OpenGL native library, using the specified library name.
 *
 * @param libName the native library name
 */
public static void create(String libName) {
    SharedLibrary lib = Library.loadNative(libName);
    create(lib);
}
/** A {@link SharedLibrary} delegate that also consults the platform's GL extension loader. */
private abstract static class SharedLibraryGL extends SharedLibrary.Delegate {

    SharedLibraryGL(SharedLibrary library) {
        super(library);
    }

    /** Resolves an extension function pointer via the platform loader (e.g. wglGetProcAddress). */
    abstract long getExtensionAddress(long name);

    @Override
    public long getFunctionAddress(ByteBuffer functionName) {
        // Try the extension loader first; core functions may only be exported by the library itself.
        long address = getExtensionAddress(memAddress(functionName));
        if ( address == NULL ) {
            address = library.getFunctionAddress(functionName);
            if ( address == NULL && DEBUG_FUNCTIONS )
                apiLog("Failed to locate address for GL function " + memASCII(functionName));
        }

        return address;
    }
}
/**
 * Wraps the loaded native library in a platform-specific {@link FunctionProvider} and initializes GL with it.
 * The library is freed if initialization fails.
 */
private static void create(SharedLibrary OPENGL) {
    FunctionProvider functionProvider;
    try {
        switch ( Platform.get() ) {
            case WINDOWS:
                functionProvider = new SharedLibraryGL(OPENGL) {
                    private final long wglGetProcAddress = library.getFunctionAddress("wglGetProcAddress");

                    @Override
                    long getExtensionAddress(long name) {
                        return callPP(wglGetProcAddress, name);
                    }
                };
                break;
            case LINUX:
                functionProvider = new SharedLibraryGL(OPENGL) {
                    private final long glXGetProcAddress;

                    {
                        // glXGetProcAddress may be missing; fall back to the ARB variant.
                        long GetProcAddress = library.getFunctionAddress("glXGetProcAddress");
                        if ( GetProcAddress == NULL )
                            GetProcAddress = library.getFunctionAddress("glXGetProcAddressARB");

                        glXGetProcAddress = GetProcAddress;
                    }

                    @Override
                    long getExtensionAddress(long name) {
                        return glXGetProcAddress == NULL ? NULL : callPP(glXGetProcAddress, name);
                    }
                };
                break;
            case MACOSX:
                // No extension loader on OS X; all symbols are exported by the framework itself.
                functionProvider = new SharedLibraryGL(OPENGL) {
                    @Override
                    long getExtensionAddress(long name) {
                        return NULL;
                    }
                };
                break;
            default:
                throw new IllegalStateException();
        }
        create(functionProvider);
    } catch (RuntimeException e) {
        OPENGL.free(); // do not leak the native library on failure
        throw e;
    }
}
/**
 * Initializes OpenGL with the specified {@link FunctionProvider}. This method can be used to implement custom OpenGL library loading.
 *
 * @param functionProvider the provider of OpenGL function addresses
 *
 * @throws IllegalStateException if OpenGL has already been created
 */
public static void create(FunctionProvider functionProvider) {
    if ( GL.functionProvider == null ) {
        GL.functionProvider = functionProvider;
    } else {
        throw new IllegalStateException("OpenGL has already been created.");
    }
}
/** Unloads the OpenGL native library and resets all cached capabilities state. */
public static void destroy() {
    if ( functionProvider == null )
        return;

    // Reset all lazily-created platform capabilities so a subsequent create() starts clean.
    capabilitiesWGL = null;
    capabilitiesGLX = null;
    // Fix: capabilitiesGLXClient was previously not cleared, leaking a stale
    // instance (bound to the freed functionProvider) across destroy()/create() cycles.
    capabilitiesGLXClient = null;

    if ( functionProvider instanceof NativeResource )
        ((NativeResource)functionProvider).free();
    functionProvider = null;
}
/** Returns the {@link FunctionProvider} for the OpenGL native library, or null if OpenGL has not been created. */
public static FunctionProvider getFunctionProvider() {
    return functionProvider;
}

/**
 * Sets the {@link GLCapabilities} of the OpenGL context that is current in the current thread.
 *
 * <p>This {@code GLCapabilities} instance will be used by any OpenGL call in the current thread, until {@code setCapabilities} is called again with a
 * different value.</p>
 *
 * @param caps the capabilities instance to make current, may be null to clear the state
 */
public static void setCapabilities(GLCapabilities caps) {
    capabilitiesState.set(caps);
}
/**
 * Returns the {@link GLCapabilities} of the OpenGL context that is current in the current thread.
 *
 * @throws IllegalStateException if {@link #setCapabilities} has never been called in the current thread or was last called with a {@code null} value
 */
public static GLCapabilities getCapabilities() {
    GLCapabilities current = capabilitiesState.get();
    if ( current != null )
        return current;

    throw new IllegalStateException("No GLCapabilities instance set for the current thread. Possible solutions:\n" +
        "\ta) Call GL.createCapabilities() after making a context current in the current thread.\n" +
        "\tb) Call GL.setCapabilities() if a GLCapabilities instance already exists for the current context.");
}
/**
 * Returns the WGL capabilities, creating them lazily from a dummy context if necessary.
 *
 * <p>This method may only be used on Windows.</p>
 */
public static WGLCapabilities getCapabilitiesWGL() {
    // NOTE(review): lazy init is not synchronized — appears to assume single-threaded first use; confirm.
    if ( capabilitiesWGL == null )
        capabilitiesWGL = createCapabilitiesWGLDummy();

    return capabilitiesWGL;
}

/** Returns the GLX client (library-side) capabilities, created lazily. */
static GLXCapabilities getCapabilitiesGLXClient() {
    if ( capabilitiesGLXClient == null )
        capabilitiesGLXClient = initCapabilitiesGLX(true);

    return capabilitiesGLXClient;
}

/**
 * Returns the GLX capabilities, created lazily.
 *
 * <p>This method may only be used on Linux.</p>
 */
public static GLXCapabilities getCapabilitiesGLX() {
    if ( capabilitiesGLX == null )
        capabilitiesGLX = initCapabilitiesGLX(false);

    return capabilitiesGLX;
}
/** Opens a temporary X connection and creates GLX capabilities from it. */
private static GLXCapabilities initCapabilitiesGLX(boolean client) {
    long display = nXOpenDisplay(NULL);
    // NOTE(review): nXOpenDisplay may fail (return NULL) when no X server is reachable;
    // that case is not checked here — verify behavior in headless environments.
    try {
        // screen == -1 selects the client-side (GLX library) extension string.
        return createCapabilitiesGLX(display, client ? -1 : XDefaultScreen(display));
    } finally {
        XCloseDisplay(display);
    }
}
/**
 * Creates a new {@link GLCapabilities} instance for the OpenGL context that is current in the current thread.
 *
 * <p>Depending on the current context, the instance returned may or may not contain the deprecated functionality removed since OpenGL version 3.1.</p>
 *
 * <p>This method calls {@link #setCapabilities(GLCapabilities)} with the new instance before returning.</p>
 *
 * @return the GLCapabilities instance
 */
public static GLCapabilities createCapabilities() {
    // Delegate with forwardCompatible == false: deprecated functionality is kept if the context exposes it.
    return createCapabilities(false);
}
/**
* Creates a new {@link GLCapabilities} instance for the OpenGL context that is current in the current thread.
*
* <p>Depending on the current context, the instance returned may or may not contain the deprecated functionality removed since OpenGL version 3.1. The
* {@code forwardCompatible} flag will force LWJGL to not load the deprecated functions, even if the current context exposes them.</p>
*
* <p>This method calls {@link #setCapabilities(GLCapabilities)} with the new instance before returning.</p>
*
* @param forwardCompatible if true, LWJGL will create forward compatible capabilities
*
* @return the GLCapabilities instance
*/
public static GLCapabilities createCapabilities(boolean forwardCompatible) {
    GLCapabilities caps = null;

    try {
        // We don't have a current ContextCapabilities when this method is called
        // so we have to use the native bindings directly.
        long GetError = functionProvider.getFunctionAddress("glGetError");
        long GetString = functionProvider.getFunctionAddress("glGetString");
        long GetIntegerv = functionProvider.getFunctionAddress("glGetIntegerv");

        if ( GetError == NULL || GetString == NULL || GetIntegerv == NULL )
            throw new IllegalStateException("Core OpenGL functions could not be found. Make sure that the OpenGL library has been loaded correctly.");

        // Flush any pre-existing error so it is not attributed to the queries below.
        int errorCode = callI(GetError);
        if ( errorCode != GL_NO_ERROR )
            apiLog(String.format("An OpenGL context was in an error state before the creation of its capabilities instance. Error: 0x%X", errorCode));

        int majorVersion;
        int minorVersion;

        try ( MemoryStack stack = stackPush() ) {
            IntBuffer version = stack.ints(0);

            // Try the 3.0+ version query first
            callPV(GetIntegerv, GL_MAJOR_VERSION, memAddress(version));
            if ( callI(GetError) == GL_NO_ERROR && 3 <= (majorVersion = version.get(0)) ) {
                // We're on an 3.0+ context.
                callPV(GetIntegerv, GL_MINOR_VERSION, memAddress(version));
                minorVersion = version.get(0);
            } else {
                // Fallback to the string query.
                long versionString = callP(GetString, GL_VERSION);
                if ( versionString == NULL || callI(GetError) != GL_NO_ERROR )
                    throw new IllegalStateException("There is no OpenGL context current in the current thread.");

                APIVersion apiVersion = apiParseVersion(memUTF8(versionString));
                majorVersion = apiVersion.major;
                minorVersion = apiVersion.minor;
            }
        }

        if ( majorVersion < 1 || (majorVersion == 1 && minorVersion < 1) )
            throw new IllegalStateException("OpenGL 1.1 is required.");

        // Maximum minor version for each major version (index = major - 1).
        int[] GL_VERSIONS = {
            5, // OpenGL 1.1 to 1.5
            1, // OpenGL 2.0 to 2.1
            3, // OpenGL 3.0 to 3.3
            5, // OpenGL 4.0 to 4.5
        };

        Set<String> supportedExtensions = new HashSet<>(512);

        // Register "OpenGLMm" pseudo-extensions for every supported core version,
        // honoring the OPENGL_MAXVERSION cap if one was configured.
        int maxMajor = min(majorVersion, GL_VERSIONS.length);
        if ( MAX_VERSION != null )
            maxMajor = min(MAX_VERSION.major, maxMajor);
        for ( int M = 1; M <= maxMajor; M++ ) {
            int maxMinor = GL_VERSIONS[M - 1];
            if ( M == majorVersion )
                maxMinor = min(minorVersion, maxMinor);
            if ( MAX_VERSION != null && M == MAX_VERSION.major )
                maxMinor = min(MAX_VERSION.minor, maxMinor);

            // OpenGL 1.0 does not exist as an LWJGL class; start minors at 1 for major 1.
            for ( int m = M == 1 ? 1 : 0; m <= maxMinor; m++ )
                supportedExtensions.add(String.format("OpenGL%d%d", M, m));
        }

        if ( majorVersion < 3 ) {
            // Parse EXTENSIONS string
            String extensionsString = memASCII(checkPointer(callP(GetString, GL_EXTENSIONS)));

            StringTokenizer tokenizer = new StringTokenizer(extensionsString);
            while ( tokenizer.hasMoreTokens() )
                supportedExtensions.add(tokenizer.nextToken());
        } else {
            // Use indexed EXTENSIONS
            try ( MemoryStack stack = stackPush() ) {
                IntBuffer pi = stack.ints(0);

                callPV(GetIntegerv, GL_NUM_EXTENSIONS, memAddress(pi));
                int extensionCount = pi.get(0);

                long GetStringi = apiGetFunctionAddress(functionProvider, "glGetStringi");
                for ( int i = 0; i < extensionCount; i++ )
                    supportedExtensions.add(memASCII(callP(GetStringi, GL_EXTENSIONS, i)));

                // In real drivers, we may encounter the following weird scenarios:
                // - 3.1 context without GL_ARB_compatibility but with deprecated functionality exposed and working.
                // - Core or forward-compatible context with GL_ARB_compatibility exposed, but not working when used.
                // We ignore these and go by the spec.

                // Force forwardCompatible to true if the context is a forward-compatible context.
                callPV(GetIntegerv, GL_CONTEXT_FLAGS, memAddress(pi));
                if ( (pi.get(0) & GL_CONTEXT_FLAG_FORWARD_COMPATIBLE_BIT) != 0 )
                    forwardCompatible = true;
                else {
                    // Force forwardCompatible to true if the context is a core profile context.
                    if ( (3 < majorVersion || 1 <= minorVersion) ) { // OpenGL 3.1+
                        if ( 3 < majorVersion || 2 <= minorVersion ) { // OpenGL 3.2+
                            callPV(GetIntegerv, GL_CONTEXT_PROFILE_MASK, memAddress(pi));
                            if ( (pi.get(0) & GL_CONTEXT_CORE_PROFILE_BIT) != 0 )
                                forwardCompatible = true;
                        } else
                            // 3.1 has no profile mask; core-ness is signaled by the absence of GL_ARB_compatibility.
                            forwardCompatible = !supportedExtensions.contains("GL_ARB_compatibility");
                    }
                }
            }
        }

        return caps = new GLCapabilities(getFunctionProvider(), supportedExtensions, forwardCompatible);
    } finally {
        // Always publish the result (or null on failure) to the thread-local state.
        setCapabilities(caps);
    }
}
/** Creates a dummy context and retrieves the WGL capabilities. */
private static WGLCapabilities createCapabilitiesWGLDummy() {
    long hdc = wglGetCurrentDC(); // just use the current context if one exists
    if ( hdc != NULL )
        return createCapabilitiesWGL(hdc);

    short classAtom = 0;
    long hwnd = NULL;
    long hglrc = NULL;
    try ( MemoryStack stack = stackPush() ) {
        // Register a throwaway window class backed by DefWindowProc.
        WNDCLASSEX wc = WNDCLASSEX.callocStack(stack)
            .cbSize(WNDCLASSEX.SIZEOF)
            .style(CS_HREDRAW | CS_VREDRAW)
            .hInstance(WindowsLibrary.HINSTANCE)
            .lpszClassName(stack.UTF16("WGL"));

        WNDCLASSEX.nlpfnWndProc(wc.address(), User32.Functions.DefWindowProc);

        classAtom = RegisterClassEx(wc);
        if ( classAtom == 0 )
            throw new IllegalStateException("Failed to register WGL window class");

        // Create an invisible 1x1 window just to obtain a device context.
        hwnd = checkPointer(nCreateWindowEx(
            0, classAtom & 0xFFFF, NULL,
            WS_OVERLAPPEDWINDOW | WS_CLIPCHILDREN | WS_CLIPSIBLINGS,
            0, 0, 1, 1,
            NULL, NULL, NULL, NULL
        ));

        hdc = checkPointer(GetDC(hwnd));

        PIXELFORMATDESCRIPTOR pfd = PIXELFORMATDESCRIPTOR.callocStack(stack)
            .nSize((short)PIXELFORMATDESCRIPTOR.SIZEOF)
            .nVersion((short)1)
            .dwFlags(PFD_SUPPORT_OPENGL); // we don't care about anything else

        int pixelFormat = ChoosePixelFormat(hdc, pfd);
        if ( pixelFormat == 0 )
            windowsThrowException("Failed to choose an OpenGL-compatible pixel format");

        if ( DescribePixelFormat(hdc, pixelFormat, pfd) == 0 )
            windowsThrowException("Failed to obtain pixel format information");

        if ( !SetPixelFormat(hdc, pixelFormat, pfd) )
            windowsThrowException("Failed to set the pixel format");

        // Create and bind a legacy context just long enough to query WGL capabilities.
        hglrc = checkPointer(wglCreateContext(hdc));
        wglMakeCurrent(hdc, hglrc);

        return createCapabilitiesWGL(hdc);
    } finally {
        // Tear down in reverse order of creation; each step is conditional so
        // partial failures above still clean up whatever was created.
        if ( hglrc != NULL ) {
            wglMakeCurrent(NULL, NULL);
            wglDeleteContext(hglrc);
        }

        if ( hwnd != NULL )
            DestroyWindow(hwnd);

        if ( classAtom != 0 )
            nUnregisterClass(classAtom & 0xFFFF, WindowsLibrary.HINSTANCE);
    }
}
/**
 * Creates a {@link WGLCapabilities} instance for the context that is current in the current thread.
 *
 * <p>This method may only be used on Windows.</p>
 *
 * @throws IllegalStateException if no OpenGL context is current in this thread
 */
public static WGLCapabilities createCapabilitiesWGL() {
    long hdc = wglGetCurrentDC();
    if ( hdc != NULL )
        return createCapabilitiesWGL(hdc);

    throw new IllegalStateException("Failed to retrieve the device context of the current OpenGL context");
}
/**
 * Creates a {@link WGLCapabilities} instance for the specified device context.
 *
 * @param hdc the device context handle ({@code HDC})
 */
private static WGLCapabilities createCapabilitiesWGL(long hdc) {
    String extensionsString = null;

    // Prefer the ARB extension-string query (takes the HDC); fall back to the EXT variant.
    long wglGetExtensionsString = functionProvider.getFunctionAddress("wglGetExtensionsStringARB");
    if ( wglGetExtensionsString != NULL )
        extensionsString = memASCII(callPP(wglGetExtensionsString, hdc));
    else {
        wglGetExtensionsString = functionProvider.getFunctionAddress("wglGetExtensionsStringEXT");
        if ( wglGetExtensionsString != NULL )
            extensionsString = memASCII(callP(wglGetExtensionsString));
        // NOTE(review): the callPP/callP results are not NULL-checked before memASCII —
        // confirm drivers never return NULL from these queries.
    }

    Set<String> supportedExtensions = new HashSet<>(32);

    if ( extensionsString != null ) {
        StringTokenizer tokenizer = new StringTokenizer(extensionsString);
        while ( tokenizer.hasMoreTokens() )
            supportedExtensions.add(tokenizer.nextToken());
    }

    return new WGLCapabilities(functionProvider, supportedExtensions);
}
/**
 * Creates a {@link GLXCapabilities} instance for the default screen of the specified X connection.
 *
 * <p>This method may only be used on Linux.</p>
 *
 * @param display the X connection handle ({@code DISPLAY})
 */
public static GLXCapabilities createCapabilitiesGLX(long display) {
    return createCapabilitiesGLX(display, XDefaultScreen(display));
}
/**
 * Creates a {@link GLXCapabilities} instance for the specified screen of the specified X connection.
 *
 * <p>This method may only be used on Linux.</p>
 *
 * @param display the X connection handle ({@code DISPLAY})
 * @param screen  the screen index, or -1 to query the client-side (GLX library) extensions
 *
 * @throws IllegalStateException if the GLX version cannot be queried or the major version is not 1
 */
public static GLXCapabilities createCapabilitiesGLX(long display, int screen) {
    int majorVersion;
    int minorVersion;

    try ( MemoryStack stack = stackPush() ) {
        IntBuffer piMajor = stack.ints(0);
        IntBuffer piMinor = stack.ints(0);

        if ( glXQueryVersion(display, piMajor, piMinor) == 0 )
            throw new IllegalStateException("Failed to query GLX version");

        majorVersion = piMajor.get(0);
        minorVersion = piMinor.get(0);
        if ( majorVersion != 1 )
            throw new IllegalStateException("Invalid GLX major version: " + majorVersion);
    }

    Set<String> supportedExtensions = new HashSet<>(32);

    // Register "GLXMm" pseudo-extensions for every supported GLX version up to the reported one.
    int[][] GLX_VERSIONS = {
        { 1, 2, 3, 4 }
    };

    for ( int major = 1; major <= GLX_VERSIONS.length; major++ ) {
        int[] minors = GLX_VERSIONS[major - 1];
        for ( int minor : minors ) {
            if ( major < majorVersion || (major == majorVersion && minor <= minorVersion) )
                supportedExtensions.add("GLX" + Integer.toString(major) + Integer.toString(minor));
        }
    }

    // Extension-string queries require GLX 1.1+.
    if ( 1 <= minorVersion ) {
        long extensionsString;

        if ( screen == -1 ) {
            long glXGetClientString = functionProvider.getFunctionAddress("glXGetClientString");
            extensionsString = callPP(glXGetClientString, display, GLX_EXTENSIONS);
        } else {
            long glXQueryExtensionsString = functionProvider.getFunctionAddress("glXQueryExtensionsString");
            extensionsString = callPP(glXQueryExtensionsString, display, screen);
        }
        // NOTE(review): neither the function addresses nor extensionsString are NULL-checked
        // here — verify behavior on drivers that lack these entry points.

        StringTokenizer tokenizer = new StringTokenizer(memASCII(extensionsString));
        while ( tokenizer.hasMoreTokens() )
            supportedExtensions.add(tokenizer.nextToken());
    }

    return new GLXCapabilities(functionProvider, supportedExtensions);
}
/** Manages the thread-local {@link GLCapabilities} state. */
private interface CapabilitiesState {
    /** Publishes the capabilities for the context current in this thread (may be null to clear). */
    void set(GLCapabilities caps);
    /** Returns the capabilities published for this thread, or null if none were set. */
    GLCapabilities get();
}
/** Default {@link CapabilitiesState} implementation using {@link ThreadLocalState}. */
private static class TLCapabilitiesState implements CapabilitiesState {
    // Delegates storage to LWJGL's per-thread state object (tlsGet()).
    @Override
    public void set(GLCapabilities caps) { tlsGet().capsGL = caps; }

    @Override
    public GLCapabilities get() { return tlsGet().capsGL; }
}
/** Optional, write-once {@link CapabilitiesState}. */
private static class StaticCapabilitiesState implements CapabilitiesState {

    // In debug mode, GLCapabilities fields are cached so differing contexts can be detected.
    private static final List<Field> flags;
    private static final List<Field> funcs;

    static {
        if ( Checks.DEBUG ) {
            Field[] fields = GLCapabilities.class.getFields();

            flags = new ArrayList<>(512);
            funcs = new ArrayList<>(256);
            // Partition: boolean fields are extension flags, the rest are function pointers.
            for ( Field f : fields )
                (f.getType() == Boolean.TYPE ? flags : funcs).add(f);
        } else {
            flags = null;
            funcs = null;
        }
    }

    // Holds the candidate instance until WriteOnce freezes it on the first get().
    private static GLCapabilities tempCaps;

    @Override
    public void set(GLCapabilities caps) {
        if ( Checks.DEBUG )
            checkCapabilities(caps);

        tempCaps = caps;
    }

    private static void checkCapabilities(GLCapabilities caps) {
        if ( caps != null && tempCaps != null && !apiCompareCapabilities(flags, funcs, tempCaps, caps) )
            apiLog("An OpenGL context with different functionality detected! The ThreadLocal capabilities state must be used.");
    }

    @Override
    public GLCapabilities get() {
        return WriteOnce.caps;
    }

    /** Lazy-holder idiom: class initialization freezes the capabilities exactly once. */
    private static final class WriteOnce {
        // This will be initialized the first time get() above is called
        private static final GLCapabilities caps = StaticCapabilitiesState.tempCaps;

        static {
            if ( caps == null )
                throw new IllegalStateException("The static GLCapabilities instance is null");
        }
    }
}
}
| |
package edu.utexas.cycic;
import java.io.File;
import edu.utah.sci.cyclist.Cyclist;
import edu.utah.sci.cyclist.core.Resources1;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.geometry.Pos;
import javafx.scene.control.Button;
import javafx.scene.control.ButtonType;
import javafx.scene.control.Dialog;
import javafx.scene.control.ButtonBar.ButtonData;
import javafx.scene.control.Hyperlink;
import javafx.scene.control.Label;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.GridPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.scene.web.WebEngine;
import javafx.scene.web.WebView;
/**
 * A small multi-page help wizard for the CycIC UI. The first page is textual; the
 * remaining pages each show one screenshot from {@code assets/HelpImages/} and are
 * chained together with NEXT/PREVIOUS buttons.
 */
public class CyclusHelp {

    /**
     * Shows the first page of the help wizard: a textual overview of the Cyclus simulator.
     */
    public static void showHelpDialog() {
        // Typed Dialog<ButtonType> (instead of the raw Dialog) so the default result
        // converter works and showAndWait() yields the pressed button without casts.
        Dialog<ButtonType> dg = new Dialog<>();
        dg.setResizable(true);
        // NOTE(review): the original titled this intro page "Regions"; kept for
        // behavioral compatibility, but it is probably a copy-paste leftover.
        dg.setTitle("Regions");

        Label text = new Label("This is designed to give you a brief overview of the Cyclus fuel cycle" +
                " simulator system. Cyclus uses an agent based approach for modeling specific parts of " +
                "the nuclear fuel cycle. The three main types of agents are: REGIONS, INSTITUTIONS and FACILITIES." +
                " Facilities may trade RESOURCES to each other through the use of COMMODITIES. Each of these topics" +
                " will be covered in this guide. You may advance to the next topic using the 'Next' button " +
                "or return to a previous topic using the 'Previous' button.");
        text.setWrapText(true);

        VBox viewBox = new VBox();
        viewBox.getChildren().add(text);
        viewBox.setSpacing(10);
        viewBox.setPrefWidth(200);
        dg.getDialogPane().setContent(viewBox);
        dg.getDialogPane().getButtonTypes().addAll(ButtonType.NEXT, ButtonType.OK);

        // No PREVIOUS on the first page. (Removed dead Hyperlink/WebView locals that
        // were never attached to the scene and whose handler was commented out.)
        handleNavigation(dg, null, CyclusHelp::helpDialogCyclus);
    }

    /** Page 2: Cyclus overview diagram. */
    public static void helpDialogCyclus() {
        showImagePage("Cyclus", "CyclusOverview.png", CyclusHelp::showHelpDialog, CyclusHelp::helpDialogRegions);
    }

    /** Page 3: regions. */
    public static void helpDialogRegions() {
        showImagePage("Regions", "Regions.png", CyclusHelp::helpDialogCyclus, CyclusHelp::helpDialogInstitutions);
    }

    /** Page 4: institutions. */
    public static void helpDialogInstitutions() {
        showImagePage("Institutions", "Institutions.png", CyclusHelp::helpDialogRegions, CyclusHelp::helpDialogFacilities);
    }

    /** Page 5: facilities. */
    public static void helpDialogFacilities() {
        showImagePage("Facilities", "Facilities.png", CyclusHelp::helpDialogInstitutions, CyclusHelp::helpDialogResources);
    }

    /** Page 6: resources. */
    public static void helpDialogResources() {
        showImagePage("Resources", "Resources.png", CyclusHelp::helpDialogFacilities, CyclusHelp::helpDialogCommodities);
    }

    /** Page 7 (last): commodities. No NEXT button. */
    public static void helpDialogCommodities() {
        showImagePage("Commodities", "Commodities.png", CyclusHelp::helpDialogResources, null);
    }

    /** @return an event handler that opens the help wizard at its first page */
    public static EventHandler<ActionEvent> helpDialogHandler() {
        return e -> showHelpDialog();
    }

    /**
     * Builds and shows one image page of the wizard. Extracted from six nearly
     * identical copy-pasted methods.
     *
     * @param title    the dialog title
     * @param image    the file name under {@code assets/HelpImages/}
     * @param previous action for the PREVIOUS button, or null to omit the button
     * @param next     action for the NEXT button, or null to omit the button
     */
    private static void showImagePage(String title, String image, Runnable previous, Runnable next) {
        Dialog<ButtonType> dg = new Dialog<>();
        dg.setResizable(true);
        dg.setTitle(title);

        VBox viewBox = new VBox();
        Image igm = new Image(Cyclist.class.getResource("assets/HelpImages/" + image).toExternalForm());
        viewBox.getChildren().add(new ImageView(igm));
        dg.getDialogPane().setContent(viewBox);

        // Preserve the original button order: NEXT, PREVIOUS, OK.
        if (next != null)
            dg.getDialogPane().getButtonTypes().add(ButtonType.NEXT);
        if (previous != null)
            dg.getDialogPane().getButtonTypes().add(ButtonType.PREVIOUS);
        dg.getDialogPane().getButtonTypes().add(ButtonType.OK);

        handleNavigation(dg, previous, next);
    }

    /** Shows the dialog and dispatches OK/NEXT/PREVIOUS to the given actions. */
    private static void handleNavigation(Dialog<ButtonType> dg, Runnable previous, Runnable next) {
        dg.showAndWait().ifPresent(response -> {
            if (response == ButtonType.OK) {
                dg.close();
            } else if (response == ButtonType.NEXT && next != null) {
                next.run();
            } else if (response == ButtonType.PREVIOUS && previous != null) {
                previous.run();
            }
        });
    }
}
| |
/*******************************************************************************
* Caleydo - Visualization for Molecular Biology - http://caleydo.org
* Copyright (c) The Caleydo Team. All rights reserved.
* Licensed under the new BSD license, available at http://caleydo.org/license
******************************************************************************/
package org.caleydo.core.view.opengl.layout2.basic;
import java.util.Locale;
import org.caleydo.core.data.collection.EDimension;
import org.caleydo.core.event.EventListenerManager.ListenTo;
import org.caleydo.core.event.EventPublisher;
import org.caleydo.core.util.color.Color;
import org.caleydo.core.view.opengl.canvas.IGLMouseListener.IMouseEvent;
import org.caleydo.core.view.opengl.layout.Column.VAlign;
import org.caleydo.core.view.opengl.layout2.GLElement;
import org.caleydo.core.view.opengl.layout2.GLElementContainer;
import org.caleydo.core.view.opengl.layout2.GLGraphics;
import org.caleydo.core.view.opengl.layout2.GLSandBox;
import org.caleydo.core.view.opengl.layout2.ISWTLayer.ISWTLayerRunnable;
import org.caleydo.core.view.opengl.layout2.PickableGLElement;
import org.caleydo.core.view.opengl.layout2.basic.AInputBoxDialog.SetValueEvent;
import org.caleydo.core.view.opengl.layout2.layout.GLLayouts;
import org.caleydo.core.view.opengl.picking.Pick;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
/**
* a simple basic widget for a slider
*
* @author Samuel Gratzl
*
*/
public class GLSlider extends PickableGLElement {
/**
* width of a gl slider
*/
private static final int BAR_WIDTH = 5;
/** Controls when the slider's numeric label(s) are rendered. */
public enum EValueVisibility {
    NONE, VISIBLE, VISIBLE_HOVERED, VISIBLE_DRAGGED;

    /** @return whether the label should be shown given the current drag/hover state */
    boolean show(boolean dragged, boolean hovered) {
        return this == VISIBLE || (this == VISIBLE_HOVERED && hovered) || (this == VISIBLE_DRAGGED && dragged);
    }
}
private ISelectionCallback callback = DUMMY_CALLBACK;
/**
* left and minimal value
*/
private float min = 0;
/**
* right and maximal value
*/
private float max = 1;
/**
* current value
*/
private float value = 0.5f;
/**
* if the user uses the mouse wheel to manipulate the value, how many DIPs are one mouse wheel rotation
*/
private float wheelInc = 1f;
private boolean hovered = false;
private boolean dragged = false;
/**
* horizontal or vertical rendering
*/
private EDimension dim = EDimension.DIMENSION;
/**
* show the value or not
*/
private EValueVisibility valueVisibility = EValueVisibility.VISIBLE;
private EValueVisibility minMaxVisibility = EValueVisibility.NONE;
/**
* the format string to use for rendering a value using {@link String#format(String, Object...)}
*/
private String valueFormat = "%.2f";
/** Creates a slider with the field defaults: range [0, 1], value 0.5. */
public GLSlider() {
}

/**
 * @param min   left/minimal value
 * @param max   right/maximal value
 * @param value initial value, clamped into [min, max]
 */
public GLSlider(float min, float max, float value) {
    this.min = min;
    this.max = max;
    this.value = clamp(value);
}
/**
 * @param wheelInc
 *            setter, see {@link #wheelInc}
 */
public GLSlider setWheelInc(float wheelInc) {
    this.wheelInc = wheelInc;
    return this;
}

/**
 * @return the dim, see {@link #dim}
 */
public EDimension getDim() {
    return dim;
}

/**
 * @param dim
 *            setter, see {@link #dim}
 */
public GLSlider setDim(EDimension dim) {
    this.dim = dim;
    return this;
}

/**
 * @param valueFormat
 *            setter, see {@link #valueFormat}; used with {@link String#format(String, Object...)}
 */
public GLSlider setValueFormat(String valueFormat) {
    this.valueFormat = valueFormat;
    return this;
}
/**
* @param valueVisibility
* setter, see {@link valueVisibility}
*/
public GLSlider setValueVisibility(EValueVisibility valueVisibility) {
if (this.valueVisibility == valueVisibility)
return this;
this.valueVisibility = valueVisibility;
repaint();
return this;
}
/**
* @return the valueVisibility, see {@link #valueVisibility}
*/
public EValueVisibility getValueVisibility() {
return valueVisibility;
}
/**
 * Sets when the min/max labels are shown, see {@link #minMaxVisibility}.
 * Triggers a repaint only if the visibility actually changed.
 *
 * @param minMaxVisibility
 *            the new visibility mode
 * @return {@code this} for chaining
 */
public GLSlider setMinMaxVisibility(EValueVisibility minMaxVisibility) {
	if (this.minMaxVisibility != minMaxVisibility) {
		this.minMaxVisibility = minMaxVisibility;
		repaint();
	}
	return this;
}
/**
 * @return the current min/max-label visibility, see {@link #minMaxVisibility}
 */
public EValueVisibility getMinMaxVisibility() {
	return this.minMaxVisibility;
}
/**
 * @return the current value, see {@link #value}
 */
public float getValue() {
	return this.value;
}
/**
 * Sets the current value, see {@link #value}. The value is clamped into
 * [{@link #min}, {@link #max}]; if it actually changed, the slider is repainted
 * and the selection callback is notified.
 *
 * @param value
 *            the requested value
 * @return {@code this} for chaining
 */
public GLSlider setValue(float value) {
	final float clamped = clamp(value);
	if (this.value != clamped) {
		this.value = clamped;
		repaintAll();
		fireCallback(clamped);
	}
	return this;
}
/**
 * Sets a new [min, max] range. The current value is re-clamped into the new
 * range; if the clamping changes it, the selection callback is fired, matching
 * the notification behavior of {@link #setValue(float)}.
 * <p>
 * BUG FIX: previously a value silently changed by re-clamping never notified
 * the {@link ISelectionCallback}, leaving listeners out of sync.
 *
 * @param min
 *            new minimal value
 * @param max
 *            new maximal value
 * @return {@code this} for chaining
 */
public GLSlider setMinMax(float min, float max) {
	if (this.min == min && this.max == max)
		return this;
	this.min = min;
	this.max = max;
	float clamped = clamp(value);
	if (clamped != this.value) {
		this.value = clamped;
		fireCallback(clamped); // keep listeners informed about the forced change
	}
	repaintAll();
	return this;
}
/**
 * @return the minimal value of the range, see {@link #min}
 */
public float getMin() {
	return this.min;
}
/**
 * @return the maximal value of the range, see {@link #max}
 */
public float getMax() {
	return this.max;
}
/**
 * Notifies the registered selection callback about a value change.
 *
 * @param value
 *            the new (already clamped) value
 */
protected final void fireCallback(float value) {
	this.callback.onSelectionChanged(this, value);
}
/**
 * Registers the selection callback, see {@link #callback}. Passing {@code null}
 * installs a no-op callback, so {@link #fireCallback(float)} never needs a null check.
 *
 * @param callback
 *            the callback to install, may be {@code null}
 * @return {@code this} for chaining
 */
public final GLSlider setCallback(ISelectionCallback callback) {
	final ISelectionCallback effective = (callback == null) ? DUMMY_CALLBACK : callback;
	if (this.callback != effective) {
		this.callback = effective;
	}
	return this;
}
/**
 * Renders the slider: the draggable bar, the optional min/max labels and the
 * optional current-value label. For vertical sliders the labels are drawn in a
 * coordinate system rotated by 90 degrees so they read along the slider axis.
 */
@Override
protected void renderImpl(GLGraphics g, float w, float h) {
	// highlight the bar while hovered or dragged
	if (hovered || dragged)
		g.color(Color.GRAY);
	else
		g.color(Color.LIGHT_GRAY);
	boolean showText = valueVisibility.show(dragged, hovered);
	boolean showMinMaxText = minMaxVisibility.show(dragged, hovered);
	if (dim.isHorizontal()) {
		// bar at the mapped pixel position, 1px inset
		float x = mapValue(w) + 1;
		g.fillRect(x, 0, Math.min(BAR_WIDTH, w - x), h);
		if (showMinMaxText) {
			g.textColor(Color.DARK_GRAY);
			g.drawText(format(min), 2, 4, w - 4, h - 11, VAlign.LEFT);
			g.drawText(format(max), 2, 4, w - 5, h - 11, VAlign.RIGHT);
			g.textColor(Color.BLACK);
		}
		if (showText)
			g.drawText(format(value), 2, 2, w - 4, h - 8, VAlign.CENTER);
	} else {
		float y = mapValue(h) + 1;
		g.fillRect(0, y, w, Math.min(BAR_WIDTH, h - y));
		// rotate so text runs along the vertical axis; restored below
		if (showText || showMinMaxText)
			g.save().gl.glRotatef(90, 0, 0, 1);
		if (showMinMaxText) {
			g.textColor(Color.DARK_GRAY);
			g.drawText(format(min), 2, 4 - w, h - 4, w - 11, VAlign.LEFT);
			g.drawText(format(max), 2, 4 - w, h - 5, w - 11, VAlign.RIGHT);
			g.textColor(Color.BLACK);
		}
		if (showText)
			g.drawText(format(value), 2, 2 - w, h - 4, w - 8, VAlign.CENTER);
		if (showText || showMinMaxText)
			g.restore();
	}
	// outline around the whole widget
	g.color(Color.BLACK).drawRect(0, 0, w, h);
}
/**
 * Formats a value using {@link #valueFormat} with a locale-independent
 * (English) decimal representation.
 *
 * @param v
 *            the value to format
 * @return the formatted text
 */
protected String format(float v) {
	return String.format(Locale.ENGLISH, this.valueFormat, v);
}
/**
 * Maps the current {@link #value} onto a pixel offset within {@code total}
 * available pixels, reserving {@code BAR_WIDTH + 2} pixels for the bar and its
 * 1px insets.
 * <p>
 * BUG FIX: when {@code min == max} the range is zero, and the old code computed
 * {@code 0 * Infinity == NaN}, propagating NaN into the render coordinates. A
 * degenerate (or inverted) range now maps to offset 0.
 *
 * @param total
 *            the slider extent (width or height) in pixels
 * @return the pixel offset of the bar's leading edge
 */
private float mapValue(float total) {
	total -= BAR_WIDTH + 2;
	float range = max - min;
	if (range <= 0)
		return 0; // degenerate or inverted range: pin the bar to the start
	float factor = total / range;
	return (value - min) * factor;
}
/**
 * Inverse of {@link #mapValue(float)}: converts a pixel offset back into a
 * value, clamped into [{@link #min}, {@link #max}]. The extent used depends on
 * the slider orientation.
 *
 * @param v
 *            pixel offset of the bar's leading edge
 * @return the corresponding (clamped) value
 */
private float unmapValue(float v) {
	float extent = dim.isHorizontal() ? getSize().x() : getSize().y();
	extent -= BAR_WIDTH + 2;
	final float range = max - min;
	final float factor = extent / range;
	return clamp(v / factor + min);
}
/**
 * Clamps a value into [{@link #min}, {@link #max}].
 *
 * @param v
 *            the value to clamp
 * @return the clamped value
 */
private float clamp(float v) {
	final float capped = Math.min(max, v);
	return Math.max(min, capped);
}
/**
 * Picking representation: only the draggable bar itself is pickable, mirroring
 * the bar geometry of {@code renderImpl}.
 */
@Override
protected void renderPickImpl(GLGraphics g, float w, float h) {
	if (!dim.isHorizontal()) {
		final float offset = mapValue(h);
		g.fillRect(0, offset, w, Math.min(BAR_WIDTH, h - offset));
	} else {
		final float offset = mapValue(w);
		g.fillRect(offset, 0, Math.min(BAR_WIDTH, w - offset), h);
	}
}
/**
 * Marks the slider as hovered (and repaints), unless some element is currently
 * being dragged.
 */
@Override
protected void onMouseOver(Pick pick) {
	if (!pick.isAnyDragging()) {
		hovered = true;
		repaint();
	}
}
/**
 * Clears the hover (and any drag) state when the mouse leaves the slider.
 */
@Override
protected void onMouseOut(Pick pick) {
	if (hovered) {
		hovered = false;
		dragged = false;
		repaint();
	}
}
/**
 * Adjusts the value by {@link #wheelInc} DIPs per wheel rotation step.
 * <p>
 * BUG FIX: the old code always used {@code getSize().x()} as the extent, so
 * wheel steps on a <em>vertical</em> slider were scaled by the widget's width
 * instead of its height. The extent now follows the slider orientation,
 * consistent with {@code unmapValue} and {@code onDragged}.
 */
@Override
protected void onMouseWheel(Pick pick) {
	final float extent = dim.isHorizontal() ? getSize().x() : getSize().y();
	setValue(unmapValue(mapValue(extent) + ((IMouseEvent) pick).getWheelRotation() * wheelInc));
	repaintAll();
}
/**
 * Starts dragging the bar, unless another element already owns the drag.
 */
@Override
protected void onDragDetected(Pick pick) {
	if (!pick.isAnyDragging()) {
		pick.setDoDragging(true);
		this.dragged = true;
		repaint();
	}
}
/**
 * Moves the bar by the drag delta along the slider orientation and updates the
 * value accordingly.
 */
@Override
protected void onDragged(Pick pick) {
	if (!pick.isDoDragging())
		return;
	final boolean horizontal = dim.isHorizontal();
	final float extent = horizontal ? getSize().x() : getSize().y();
	final float delta = horizontal ? pick.getDx() : pick.getDy();
	setValue(unmapValue(mapValue(extent) + delta));
	repaintAll();
}
/**
 * Ends a drag interaction and repaints to drop the highlight.
 */
@Override
protected void onMouseReleased(Pick pick) {
	dragged = false;
	repaint();
}
/**
 * On double click, opens an input dialog on the SWT layer so the user can type
 * an exact value.
 */
@Override
protected void onDoubleClicked(Pick pick) {
	context.getSWTLayer().run(new ISWTLayerRunnable() {
		@Override
		public void run(Display display, Composite canvas) {
			new InputBox(canvas).open();
		}
	});
}
/**
 * callback interface for selection changes
 *
 * @author Samuel Gratzl
 *
 */
public interface ISelectionCallback {
	/**
	 * Called whenever the slider's value changes.
	 *
	 * @param slider
	 *            the slider that changed
	 * @param value
	 *            the new (already clamped) value
	 */
	void onSelectionChanged(GLSlider slider, float value);
}
// no-op default callback; keeps fireCallback free of null checks
private static final ISelectionCallback DUMMY_CALLBACK = new ISelectionCallback() {
	@Override
	public void onSelectionChanged(GLSlider slider, float value) {
	}
};
/**
 * Event handler targeted at this slider (used by {@link InputBox}): applies a
 * value typed by the user.
 * <p>
 * NOTE(review): assumes the event payload parses as a float. {@code InputBox#verify}
 * validates this before triggering, but other senders could pass a malformed
 * string and cause a {@link NumberFormatException} — confirm.
 */
@ListenTo(sendToMe = true)
private void onSetValueEvent(SetValueEvent event) {
	setValue(Float.parseFloat(event.getValue()));
}
/**
 * Modal input dialog letting the user type an exact slider value; the accepted
 * value is delivered back to this slider via a {@link SetValueEvent}.
 */
private class InputBox extends AInputBoxDialog {
	public InputBox(Composite canvas) {
		super(null, "Set Value", GLSlider.this, canvas);
	}
	@Override
	protected void set(String value) {
		// deliver asynchronously over the event bus, targeted at this slider
		EventPublisher.trigger(new SetValueEvent(value).to(GLSlider.this));
	}
	/**
	 * Validates the typed text: it must parse as a float and lie within [min, max].
	 *
	 * @return an error message, or {@code null} if the value is acceptable
	 */
	@Override
	protected String verify(String value) {
		try {
			float v = Float.parseFloat(value);
			if (v < min)
				return "Too small, needs to be in the range: [" + min + "," + max + "]";
			if (v > max)
				return "Too large, needs to be in the range: [" + min + "," + max + "]";
		} catch (NumberFormatException e) {
			return "The value: '" + value + "' can't be parsed to Float: " + e.getMessage();
		}
		return null;
	}
	@Override
	protected String getInitialValue() {
		return String.valueOf(getValue());
	}
}
/**
 * Small interactive demo: a slider (min/max labels shown while dragging)
 * between two spacer elements, hosted in a sandbox window.
 */
public static void main(String[] args) {
	GLElementContainer container = new GLElementContainer(GLLayouts.flowHorizontal(2));
	container.add(new GLElement());
	container.add(new GLSlider(1, 20, 0.2f).setDim(EDimension.DIMENSION)
			.setMinMaxVisibility(EValueVisibility.VISIBLE_DRAGGED)
			.setSize(-1, 32));
	container.add(new GLElement());
	GLSandBox.main(args, container);
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.tasks.vcs;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.CheckinProjectPanel;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.ProjectLevelVcsManager;
import com.intellij.openapi.vcs.VcsDirectoryMapping;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vcs.changes.committed.MockAbstractVcs;
import com.intellij.openapi.vcs.changes.shelf.ShelveChangesManager;
import com.intellij.openapi.vcs.changes.shelf.ShelvedChangeList;
import com.intellij.openapi.vcs.changes.ui.CommitHelper;
import com.intellij.openapi.vcs.checkin.CheckinHandler;
import com.intellij.openapi.vcs.impl.ProjectLevelVcsManagerImpl;
import com.intellij.openapi.vcs.impl.projectlevelman.AllVcses;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.tasks.*;
import com.intellij.tasks.actions.OpenTaskDialog;
import com.intellij.tasks.impl.*;
import com.intellij.testFramework.fixtures.CodeInsightFixtureTestCase;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import com.intellij.vcsUtil.VcsUtil;
import icons.TasksIcons;
import org.easymock.EasyMock;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
/**
 * Integration tests for the synchronization between the IntelliJ task manager
 * and VCS changelists: activating a task creates/associates a changelist,
 * committing and shelving record task context, and changelist/branch names
 * follow the task naming rules. Uses a {@link MockAbstractVcs} with a stubbed
 * {@link ChangeProvider} so no real VCS is required.
 */
public class TaskVcsTest extends CodeInsightFixtureTestCase {
  private TestRepository myRepository;
  private MockAbstractVcs myVcs;
  private MyMockChangeProvider myChangeProvider;
  private ChangeListManagerImpl myChangeListManager;
  private TaskManagerImpl myTaskManager;

  /** After setUp there is exactly one (default) task associated with the single default changelist. */
  public void testInitialState() {
    assertEquals(1, myTaskManager.getLocalTasks().size());
    final LocalTask defaultTask = myTaskManager.getLocalTasks().get(0);
    assertEquals(defaultTask, myTaskManager.getActiveTask());
    assertTrue(defaultTask.isDefault());
    assertEquals(1, myChangeListManager.getChangeListsCopy().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(myChangeListManager.getChangeListsCopy().get(0)));
    assertEquals(defaultTask.getChangeLists().get(0).id, myChangeListManager.getChangeListsCopy().get(0).getId());
    assertEquals(defaultTask.getChangeLists().get(0), new ChangeListInfo(myChangeListManager.getChangeListsCopy().get(0)));
  }

  /** Switching between tasks keeps each task associated with its own changelist and toggles the default list. */
  public void testSwitchingTasks() {
    final LocalTask defaultTask = myTaskManager.getLocalTasks().get(0);
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    assertEquals(2, myTaskManager.getLocalTasks().size());
    LocalTask localTask = myTaskManager.getActiveTask();
    assertEquals(task, localTask);
    assertEquals(1, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());
    assertEquals(localTask, myTaskManager.getAssociatedTask(myChangeListManager.getChangeListsCopy().get(0)));
    myTaskManager.activateTask(defaultTask, false);
    assertEquals(1, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(myChangeListManager.getChangeListsCopy().get(0)));
    activateAndCreateChangelist(localTask);
    assertEquals(1, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());
    LocalChangeList activeChangeList = myChangeListManager.getDefaultChangeList();
    // the "other" list is whichever of the two lists is not the active one
    LocalChangeList anotherChangeList = myChangeListManager.getChangeListsCopy().get(1 - myChangeListManager.getChangeListsCopy().indexOf(activeChangeList));
    assertNotNull(activeChangeList);
    assertEquals(localTask, myTaskManager.getAssociatedTask(activeChangeList));
    assertEquals("TEST-001 Summary", activeChangeList.getName());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals(LocalChangeList.DEFAULT_NAME, anotherChangeList.getName());
    myTaskManager.activateTask(defaultTask, false);
    myChangeListManager.waitUntilRefreshed();
    assertEquals(1, localTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());
    activeChangeList = myChangeListManager.getDefaultChangeList();
    anotherChangeList = myChangeListManager.getChangeListsCopy().get(1 - myChangeListManager.getChangeListsCopy().indexOf(activeChangeList));
    assertNotNull(activeChangeList);
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(activeChangeList));
    assertEquals(LocalChangeList.DEFAULT_NAME, activeChangeList.getName());
    assertEquals(localTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals("TEST-001 Summary", anotherChangeList.getName());
  }

  /** Creating an extra changelist through the task manager keeps task associations consistent. */
  public void testAddChangeListViaCreateChangeListAction() {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();
    LocalTask defaultTask = myTaskManager.findTask(LocalTaskImpl.DEFAULT_TASK_ID);
    assertNotNull(defaultTask);
    activateAndCreateChangelist(defaultTask);
    myChangeListManager.waitUntilRefreshed();
    assertEquals(defaultTask, myTaskManager.getActiveTask());
    LocalTask anotherTask = myTaskManager.findTask("TEST-001");
    assertNotNull(anotherTask);
    myTaskManager.createChangeList(defaultTask, "Default (1)");
    myChangeListManager.waitUntilRefreshed();
    assertEquals(1, anotherTask.getChangeLists().size());
    assertEquals(2, defaultTask.getChangeLists().size());
    assertEquals(3, myChangeListManager.getChangeListsCopy().size());
    LocalChangeList defaultChangeListActive = myChangeListManager.findChangeList("Default (1)");
    assertNotNull(defaultChangeListActive);
    assertTrue(defaultChangeListActive.isDefault());
    LocalChangeList defaultChangeListInactive = myChangeListManager.findChangeList(LocalChangeList.DEFAULT_NAME);
    assertNotNull(defaultChangeListInactive);
    LocalChangeList anotherChangeList = myChangeListManager.findChangeList("TEST-001 Summary");
    assertNotNull(anotherChangeList);
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListActive));
    assertEquals("Default (1)", defaultChangeListActive.getName());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListInactive));
    assertEquals(LocalChangeList.DEFAULT_NAME, defaultChangeListInactive.getName());
    assertEquals(anotherTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals("TEST-001 Summary", anotherChangeList.getName());
  }

  /** Removing a changelist through the VCS listener updates the task manager's bookkeeping. */
  public void testRemoveChangelistViaVcsAction() {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();
    LocalTask defaultTask = myTaskManager.findTask(LocalTaskImpl.DEFAULT_TASK_ID);
    assertNotNull(defaultTask);
    activateAndCreateChangelist(defaultTask);
    myChangeListManager.waitUntilRefreshed();
    assertEquals(defaultTask, myTaskManager.getActiveTask());
    LocalTask anotherTask = myTaskManager.findTask("TEST-001");
    assertNotNull(anotherTask);
    LocalChangeList defaultChangeList = myChangeListManager.findChangeList(LocalChangeList.DEFAULT_NAME);
    assertNotNull(defaultChangeList);
    LocalChangeList anotherChangeList = myChangeListManager.findChangeList("TEST-001 Summary");
    assertNotNull(anotherChangeList);
    removeChangeList(anotherChangeList);
    assertEquals(1, anotherTask.getChangeLists().size());
    assertEquals(1, defaultTask.getChangeLists().size());
    assertEquals(1, myChangeListManager.getChangeListsCopy().size());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeList));
    assertEquals(LocalChangeList.DEFAULT_NAME, defaultChangeList.getName());
  }

  /** Helper: ensures the task has a changelist (creating one if needed), then activates it. */
  private void activateAndCreateChangelist(Task task) {
    LocalTaskImpl localTask = new LocalTaskImpl(task);
    if (localTask.getChangeLists().isEmpty()) {
      myTaskManager.createChangeList(localTask, myTaskManager.getChangelistName(localTask));
    }
    myTaskManager.activateTask(task, false);
  }

  /** Adding a changelist via the VCS UI hook keeps it associated with the active task. */
  public void testAddChangeListViaVcsAction() {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();
    LocalTask defaultTask = myTaskManager.findTask(LocalTaskImpl.DEFAULT_TASK_ID);
    assertNotNull(defaultTask);
    activateAndCreateChangelist(defaultTask);
    myChangeListManager.waitUntilRefreshed();
    assertEquals(defaultTask, myTaskManager.getActiveTask());
    LocalTask anotherTask = myTaskManager.findTask("TEST-001");
    assertNotNull(anotherTask);
    addChangeList("Default (1)");
    assertEquals(1, anotherTask.getChangeLists().size());
    assertEquals(2, defaultTask.getChangeLists().size());
    assertEquals(3, myChangeListManager.getChangeListsCopy().size());
    LocalChangeList defaultChangeListActive = myChangeListManager.findChangeList(LocalChangeList.DEFAULT_NAME);
    assertNotNull(defaultChangeListActive);
    assertTrue(myChangeListManager.getDefaultListName(), defaultChangeListActive.isDefault());
    LocalChangeList defaultChangeListInactive = myChangeListManager.findChangeList("Default (1)");
    assertNotNull(defaultChangeListInactive);
    LocalChangeList anotherChangeList = myChangeListManager.findChangeList("TEST-001 Summary");
    assertNotNull(anotherChangeList);
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListActive));
    assertEquals(LocalChangeList.DEFAULT_NAME, defaultChangeListActive.getName());
    assertEquals(defaultTask, myTaskManager.getAssociatedTask(defaultChangeListInactive));
    assertEquals("Default (1)", defaultChangeListInactive.getName());
    assertEquals(anotherTask, myTaskManager.getAssociatedTask(anotherChangeList));
    assertEquals("TEST-001 Summary", anotherChangeList.getName());
  }

  /** With trackContextForNewChangelist enabled, a new changelist spawns a matching local task. */
  public void testTrackContext() {
    myTaskManager.getState().trackContextForNewChangelist = true;
    addChangeList("New Changelist");
    assertEquals(2, myTaskManager.getLocalTasks().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());
    LocalChangeList newChangeList = myChangeListManager.findChangeList("New Changelist");
    assertNotNull(newChangeList);
    LocalTask newTask = myTaskManager.getAssociatedTask(newChangeList);
    assertNotNull(newTask);
    assertEquals("New Changelist", newTask.getSummary());
    myTaskManager.getState().trackContextForNewChangelist = false;
  }

  /** The changelist comment is produced from the repository's commit-message format placeholders. */
  public void testCreateComment() {
    myRepository.setShouldFormatCommitMessage(true);
    myRepository.setCommitMessageFormat("{id} {summary} {number} {project}");
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();
    LocalTask localTask = myTaskManager.getActiveTask();
    assertNotNull(localTask);
    assertEquals("TEST-001", localTask.getId());
    List<ChangeListInfo> lists = localTask.getChangeLists();
    assertEquals(1, lists.size());
    assertEquals("TEST-001 Summary 001 TEST", lists.get(0).comment);
  }

  /** Committing a changelist of an existing task must not create an extra task or break the association. */
  public void testSaveContextOnCommitForExistingTask() {
    myTaskManager.getState().saveContextOnCommit = true;
    assertEquals(1, myTaskManager.getLocalTasks().size());
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    assertEquals(1, myChangeListManager.getChangeListsCopy().size()); // default change list should be here
    activateAndCreateChangelist(task);
    myChangeListManager.waitUntilRefreshed();
    assertEquals(2, myTaskManager.getLocalTasks().size());
    List<LocalChangeList> copy = myChangeListManager.getChangeListsCopy();
    assertEquals(copy.toString(), 2, copy.size());
    LocalTask localTask = myTaskManager.getActiveTask();
    List<ChangeListInfo> changelists = localTask.getChangeLists();
    ChangeListInfo info = changelists.get(0);
    LocalChangeList changeList = myChangeListManager.getChangeList(info.id);
    assertNotNull(changeList);
    List<Change> changes = addChanges(changeList);
    commitChanges(changeList, changes);
    assertEquals(2, myTaskManager.getLocalTasks().size()); // no extra task created
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());
    assertEquals(localTask, myTaskManager.getAssociatedTask(changeList)); // association should survive
  }

  /** Committing an unassociated changelist with saveContextOnCommit creates a task for it. */
  public void testSaveContextOnCommit() {
    myTaskManager.getState().saveContextOnCommit = true;
    assertEquals(1, myTaskManager.getLocalTasks().size());
    assertEquals(1, myChangeListManager.getChangeListsCopy().size());
    LocalChangeList changeList = addChangeList("New Changelist");
    assertEquals(1, myTaskManager.getLocalTasks().size());
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());
    List<Change> changes = addChanges(changeList);
    commitChanges(changeList, changes);
    assertEquals(2, myTaskManager.getLocalTasks().size()); // extra task created
    assertEquals(2, myChangeListManager.getChangeListsCopy().size());
    assertTrue(ContainerUtil.exists(myTaskManager.getLocalTasks(), task -> task.getSummary().equals("New Changelist")));
  }

  /** Helper: runs a commit through CommitHelper with the task check-in handler, mocking the commit panel. */
  private void commitChanges(LocalChangeList changeList, List<Change> changes) {
    String commitMessage = changeList.getName();
    CheckinProjectPanel panel = EasyMock.createMock(CheckinProjectPanel.class);
    EasyMock.expect(panel.getProject()).andReturn(getProject());
    EasyMock.expect(panel.getCommitMessage()).andReturn(commitMessage);
    EasyMock.replay(panel);
    CheckinHandler checkinHandler = new TaskCheckinHandlerFactory().createHandler(panel, new CommitContext());
    List<CheckinHandler> handlers = ContainerUtil.list(checkinHandler);
    CommitHelper helper = new CommitHelper(getProject(), changeList, changes, "Commit", commitMessage, handlers, false, true,
                                           new PseudoMap<>(), null);
    helper.doCommit();
  }

  /** Helper: adds a changelist the way the VCS "new changelist" UI would (task controls attached). */
  private LocalChangeList addChangeList(String title) {
    final LocalChangeList list = myChangeListManager.addChangeList(title, "");
    new TaskChangelistSupport(getProject(), myTaskManager).addControls(new JPanel(), null).consume(list);
    return list;
  }

  /** Helper: removes a changelist and notifies the task manager's changelist listener. */
  private void removeChangeList(LocalChangeList changeList) {
    myChangeListManager.removeChangeList(changeList);
    myTaskManager.getChangeListListener().changeListRemoved(changeList);
  }

  /** Helper: creates one file change via the mock change provider and moves it into the given list. */
  @NotNull
  private List<Change> addChanges(@NotNull LocalChangeList list) {
    VirtualFile file = myFixture.getTempDirFixture().createFile("Test.txt");
    FilePath path = VcsUtil.getFilePath(file);
    Change change = new Change(null,
                               new CurrentContentRevision(path));
    List<Change> changes = Collections.singletonList(change);
    myChangeProvider.setChanges(changes);
    VcsDirtyScopeManager.getInstance(getProject()).markEverythingDirty();
    myChangeListManager.scheduleUpdate();
    myChangeListManager.waitUntilRefreshed();
    myChangeListManager.moveChangesTo(list, change);
    myChangeListManager.waitUntilRefreshed();
    LOG.debug(dumpChangeListManager());
    return changes;
  }

  /** Task ids with dashes split into project ("foo-bar") and number ("001") parts. */
  public void testProjectWithDash() {
    LocalTaskImpl task = new LocalTaskImpl("foo-bar-001", "summary") {
      @Override
      public TaskRepository getRepository() {
        return myRepository;
      }
      @Override
      public boolean isIssue() {
        return true;
      }
    };
    assertEquals("foo-bar", task.getProject());
    assertEquals("001", task.getNumber());
    String name = myTaskManager.getChangelistName(task);
    assertEquals("foo-bar-001 summary", name);
  }

  /** Degenerate ids (empty, lone dash, no digits) have no project part. */
  public void testIds() {
    LocalTaskImpl task = new LocalTaskImpl("", "");
    assertEquals("", task.getNumber());
    assertEquals(null, task.getProject());
    task = new LocalTaskImpl("-", "");
    assertEquals("-", task.getNumber());
    assertEquals(null, task.getProject());
    task = new LocalTaskImpl("foo", "");
    assertEquals("foo", task.getNumber());
    assertEquals(null, task.getProject());
    task = new LocalTaskImpl("112", "");
    assertEquals("112", task.getNumber());
    assertEquals(null, task.getProject());
  }

  /** Re-activating a locally-closed task (its changelist was removed) recreates the changelist. */
  public void testRestoreChangelist() {
    final LocalTaskImpl task = new LocalTaskImpl("foo", "bar");
    activateAndCreateChangelist(task);
    activateAndCreateChangelist(new LocalTaskImpl("next", ""));
    final String changelistName = myTaskManager.getChangelistName(task);
    myChangeListManager.removeChangeList(changelistName);
    myChangeListManager.invokeAfterUpdate(() -> {
      assertTrue(myTaskManager.isLocallyClosed(task));
      activateAndCreateChangelist(task);
      assertNotNull(myChangeListManager.findChangeList(changelistName));
    }, InvokeAfterUpdateMode.SYNCHRONOUS_NOT_CANCELLABLE, "foo", ModalityState.NON_MODAL);
  }

  /** Branch names are derived from id/summary according to the branchNameFormat state. */
  public void testSuggestBranchName() {
    Task task = myRepository.findTask("TEST-001");
    assertNotNull(task);
    assertTrue(task.isIssue());
    assertEquals("TEST-001", myTaskManager.suggestBranchName(task));
    LocalTaskImpl simple = new LocalTaskImpl("1", "simple");
    assertEquals("simple", myTaskManager.suggestBranchName(simple));
    LocalTaskImpl strange = new LocalTaskImpl("1", "very long and strange summary");
    assertEquals("very-long", myTaskManager.suggestBranchName(strange));
    myTaskManager.getState().branchNameFormat = "{id} {summary}";
    LocalTaskImpl withIllegalSymbolsInIssue = new LocalTaskImpl("1", "contains Illegal$Symbols");
    withIllegalSymbolsInIssue.setIssue(true);
    assertEquals("1-contains-Illegal$Symbols", myTaskManager.suggestBranchName(withIllegalSymbolsInIssue));
  }

  /** With shelveChanges enabled, opening a new task shelves the current changes and restores them on re-activation. */
  public void testShelveChanges() {
    LocalTask activeTask = myTaskManager.getActiveTask();
    addChanges(myChangeListManager.getDefaultChangeList());
    myTaskManager.getState().shelveChanges = true;
    LocalTaskImpl task = new LocalTaskImpl("id", "summary");
    OpenTaskDialog dialog = new OpenTaskDialog(getProject(), task);
    try {
      dialog.createTask();
      assertEquals(dumpChangeListManager(), activeTask.getSummary(), activeTask.getShelfName());
      List<ShelvedChangeList> lists = ShelveChangesManager.getInstance(getProject()).getShelvedChangeLists();
      assertTrue(lists.stream().anyMatch(list -> list.DESCRIPTION.equals(activeTask.getShelfName())));
      assertEmpty(myChangeListManager.getDefaultChangeList().getChanges());
      myTaskManager.activateTask(activeTask, true);
      Collection<Change> changes = myChangeListManager.getDefaultChangeList().getChanges();
      assertNotEmpty(changes);
    }
    finally {
      dialog.close(DialogWrapper.OK_EXIT_CODE);
    }
    UIUtil.dispatchAllInvocationEvents();
  }

  /** The commit-message provider formats the message from the task associated with the changelist. */
  public void testAssociatedChangelist() {
    ChangeListManager changeListManager = ChangeListManager.getInstance(getProject());
    LocalChangeList changeList = changeListManager.getDefaultChangeList();
    assertNotNull(changeList);
    assertEquals(myTaskManager.getActiveTask(), myTaskManager.getAssociatedTask(changeList));
    LocalTaskImpl bond = new LocalTaskImpl("007", "Bond");
    TestRepository repository = new TestRepository();
    repository.setShouldFormatCommitMessage(true);
    bond.setRepository(repository);
    myTaskManager.activateTask(bond, false);
    assertEquals("007 Bond", new TaskCommitMessageProvider().getCommitMessage(changeList, getProject()));
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
    // register a mock VCS with a stubbed change provider mapped onto the whole project
    myVcs = new MockAbstractVcs(getProject());
    myChangeProvider = new MyMockChangeProvider();
    myVcs.setChangeProvider(myChangeProvider);
    myChangeListManager = (ChangeListManagerImpl)ChangeListManager.getInstance(getProject());
    ProjectLevelVcsManagerImpl vcsManager = (ProjectLevelVcsManagerImpl)ProjectLevelVcsManager.getInstance(getProject());
    vcsManager.registerVcs(myVcs);
    vcsManager.setDirectoryMappings(Collections.singletonList(new VcsDirectoryMapping("", myVcs.getName())));
    vcsManager.waitForInitialized();
    assertTrue(vcsManager.hasActiveVcss());
    myTaskManager = (TaskManagerImpl)TaskManager.getManager(getProject());
    myRepository = new TestRepository();
    myRepository.setTasks(new MyTask());
    myTaskManager.setRepositories(Collections.singletonList(myRepository));
  }

  @Override
  protected void tearDown() throws Exception {
    try {
      myTaskManager.setRepositories(Collections.emptyList());
      AllVcses.getInstance(getProject()).unregisterManually(myVcs);
    }
    finally {
      // null out references so the fixture does not leak across tests
      myTaskManager = null;
      myVcs = null;
      myChangeListManager = null;
      super.tearDown();
    }
  }

  /** Helper: human-readable dump of all changelists and their changes for assertion messages. */
  @NotNull
  private String dumpChangeListManager() {
    return StringUtil.join(myChangeListManager.getChangeLists(), list -> {
      return String.format("list: %s (%s) changes: %s", list.getName(), list.getId(), StringUtil.join(list.getChanges(), ", "));
    }, "\n");
  }

  /** Stub change provider that reports a fixed, externally supplied set of changes. */
  private static class MyMockChangeProvider implements ChangeProvider {
    private List<Change> myChanges = Collections.emptyList();
    public void setChanges(List<Change> changes) {
      myChanges = changes;
    }
    @Override
    public void getChanges(@NotNull VcsDirtyScope dirtyScope,
                           @NotNull final ChangelistBuilder builder,
                           @NotNull ProgressIndicator progress,
                           @NotNull ChangeListManagerGate addGate) {
      for (Change change : myChanges) {
        builder.processChange(change, MockAbstractVcs.getKey());
      }
    }
    @Override
    public boolean isModifiedDocumentTrackingRequired() {
      return false;
    }
    @Override
    public void doCleanup(List<VirtualFile> files) {
    }
  }

  /** Fixed test issue "TEST-001 Summary" served by the test repository. */
  private class MyTask extends Task {
    @NotNull
    @Override
    public String getId() {
      return "TEST-001";
    }
    @NotNull
    @Override
    public String getSummary() {
      return "Summary";
    }
    @Override
    public String getDescription() {
      return null;
    }
    @NotNull
    @Override
    public Comment[] getComments() {
      return Comment.EMPTY_ARRAY;
    }
    @NotNull
    @Override
    public Icon getIcon() {
      return TasksIcons.Unknown;
    }
    @NotNull
    @Override
    public TaskType getType() {
      return TaskType.BUG;
    }
    @Override
    public Date getUpdated() {
      return null;
    }
    @Override
    public Date getCreated() {
      return null;
    }
    @Override
    public boolean isClosed() {
      return false;
    }
    @Override
    public boolean isIssue() {
      return true;
    }
    @Override
    public String getIssueUrl() {
      return null;
    }
    @Override
    public TaskRepository getRepository() {
      return myRepository;
    }
  }
}
| |
package mat.client.shared;
import com.google.gwt.cell.client.AbstractEditableCell;
import com.google.gwt.cell.client.ValueUpdater;
import com.google.gwt.core.shared.GWT;
import com.google.gwt.dom.client.BrowserEvents;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.InputElement;
import com.google.gwt.dom.client.NativeEvent;
import com.google.gwt.event.dom.client.KeyCodes;
import com.google.gwt.safehtml.shared.SafeHtml;
import com.google.gwt.safehtml.shared.SafeHtmlBuilder;
import com.google.gwt.safehtml.shared.SafeHtmlUtils;
public class MatCheckBoxCell extends AbstractEditableCell<Boolean, Boolean> {
/**
 * Tooltip/title text rendered on the checkbox input element.
 * (NOTE(review): the previous comment, "An html string representation of a
 * checked input box", described a different field.)
 */
private String checkBoxTitle = "Click checkbox to select";
/** Whether this cell depends on the row selection state. */
private final boolean dependsOnSelection;
/** Whether this cell modifies the row selection state. */
private final boolean handlesSelection;
// When true, browser events are ignored (see onBrowserEvent's !isUsed guard),
// i.e. the checkbox is effectively read-only.
private boolean isUsed;
/**
 * Returns the "used" flag; when {@code true}, this cell ignores browser events.
 *
 * @return true, if is used
 */
public boolean isUsed() {
	return this.isUsed;
}
/**
 * Sets the "used" flag; when {@code true}, this cell ignores browser events.
 *
 * @param isUsed
 *            the new used value
 */
public void setUsed(boolean isUsed) {
	this.isUsed = isUsed;
}
/**
 * Construct a new {@link MatCheckBoxCell} that neither depends on nor handles
 * selection (delegates to the deprecated single-argument constructor with
 * {@code false}).
 */
public MatCheckBoxCell() {
	this(false);
}
/**
 * Instantiates a new mat check box cell.
 * <p>
 * NOTE(review): the {@code isEditable} argument is stored in {@code isUsed},
 * and event handling only runs while {@code isUsed} is {@code false} — so
 * passing {@code true} here makes the checkbox read-only. Verify the intended
 * polarity with callers.
 *
 * @param dependsOnSelection the depends on selection
 * @param handlesSelection the handles selection
 * @param isEditable the is editable
 */
public MatCheckBoxCell(boolean dependsOnSelection, boolean handlesSelection, boolean isEditable) {
	super(BrowserEvents.CHANGE, BrowserEvents.KEYDOWN);
	this.dependsOnSelection = dependsOnSelection;
	this.handlesSelection = handlesSelection;
	this.isUsed = isEditable;
}
/**
 * Construct a new {@link MatCheckBoxCell} that optionally controls selection.
 *
 * @param isSelectBox true if the cell controls the selection state
 * @deprecated use {@link #MatCheckBoxCell(boolean, boolean)} instead
 */
@Deprecated
public MatCheckBoxCell(boolean isSelectBox) {
	this(isSelectBox, isSelectBox);
}
/**
 * Construct a new {@link MatCheckBoxCell} that optionally controls selection.
 * Registers for CHANGE and KEYDOWN browser events.
 *
 * @param dependsOnSelection true if the cell depends on the selection state
 * @param handlesSelection true if the cell modifies the selection state
 */
public MatCheckBoxCell(boolean dependsOnSelection, boolean handlesSelection) {
	super(BrowserEvents.CHANGE, BrowserEvents.KEYDOWN);
	this.dependsOnSelection = dependsOnSelection;
	this.handlesSelection = handlesSelection;
}
/**
 * Construct a new {@link MatCheckBoxCell} with a custom tooltip title.
 *
 * @param dependsOnSelection true if the cell depends on the selection state
 * @param handlesSelection true if the cell modifies the selection state
 * @param title the tooltip/title text for the checkbox input element
 */
public MatCheckBoxCell(boolean dependsOnSelection, boolean handlesSelection, String title) {
	super(BrowserEvents.CHANGE, BrowserEvents.KEYDOWN);
	this.dependsOnSelection = dependsOnSelection;
	this.handlesSelection = handlesSelection;
	checkBoxTitle = title;
}
/**
 * @return whether this cell depends on the row selection state
 * @see com.google.gwt.cell.client.AbstractCell#dependsOnSelection()
 */
@Override
public boolean dependsOnSelection() {
	return this.dependsOnSelection;
}
/**
 * @return whether this cell modifies the row selection state
 * @see com.google.gwt.cell.client.AbstractCell#handlesSelection()
 */
@Override
public boolean handlesSelection() {
	return this.handlesSelection;
}
/**
 * Handles CHANGE and KEYDOWN (enter) events: toggles the checkbox where
 * appropriate, records view data and forwards the new value to the updater.
 * Events are ignored while {@link #isUsed} is {@code true}.
 *
 * @see com.google.gwt.cell.client.AbstractCell#onBrowserEvent(com.google.gwt.cell.client.Cell.Context, com.google.gwt.dom.client.Element, java.lang.Object, com.google.gwt.dom.client.NativeEvent, com.google.gwt.cell.client.ValueUpdater)
 */
@Override
public void onBrowserEvent(Context context, Element parent, Boolean value,
		NativeEvent event, ValueUpdater<Boolean> valueUpdater) {
	String type = event.getType();
	boolean enterPressed = BrowserEvents.KEYDOWN.equals(type)
			&& event.getKeyCode() == KeyCodes.KEY_ENTER;
	if ((BrowserEvents.CHANGE.equals(type) || enterPressed) && !isUsed) {
		InputElement input = parent.getFirstChild().cast();
		Boolean isChecked = input.isChecked();
		/*
		 * Toggle the value if the enter key was pressed and the cell handles
		 * selection or doesn't depend on selection. If the cell depends on
		 * selection but doesn't handle selection, then ignore the enter key and
		 * let the SelectionEventManager determine which keys will trigger a
		 * change.
		 */
		if (enterPressed && (handlesSelection() || !dependsOnSelection())) {
			isChecked = !isChecked;
			input.setChecked(isChecked);
		}
		/*
		 * Save the new value. However, if the cell depends on the selection, then
		 * do not save the value because we can get into an inconsistent state.
		 *
		 * BUG FIX: 'value != isChecked' compared boxed Booleans by reference;
		 * use equals() instead (null-safe here since isChecked is never null).
		 */
		if (!isChecked.equals(value) && !dependsOnSelection()) {
			setViewData(context.getKey(), isChecked);
		} else {
			clearViewData(context.getKey());
		}
		if (valueUpdater != null) {
			valueUpdater.update(isChecked);
		}
	}
}
  /**
   * A checkbox commits on every change rather than entering a distinct edit
   * mode, so this cell never reports an "editing" state.
   *
   * @return always false
   */
  @Override
  public boolean isEditing(com.google.gwt.cell.client.Cell.Context context,
      Element parent, Boolean value) {
    return false;
  }
/* (non-Javadoc)
* @see com.google.gwt.cell.client.AbstractCell#render(com.google.gwt.cell.client.Cell.Context, java.lang.Object, com.google.gwt.safehtml.shared.SafeHtmlBuilder)
*/
@Override
public void render(Context context, Boolean value, SafeHtmlBuilder sb) {
/** The Constant INPUT_CHECKED. */
SafeHtml INPUT_CHECKED = SafeHtmlUtils.fromSafeConstant(
"<input type=\"checkbox\" tabindex=\"0\" title=\" " + checkBoxTitle + "\" checked/>");
/**
* An html string representation of an unchecked input box.
*/
SafeHtml INPUT_UNCHECKED = SafeHtmlUtils.fromSafeConstant(
"<input type=\"checkbox\" tabindex=\"0\" title=\"" + checkBoxTitle + "\" />");
/** The Constant INPUT_UNCHECKED_DISABLED. */
SafeHtml INPUT_UNCHECKED_DISABLED = SafeHtmlUtils.fromSafeConstant(
"<input type=\"checkbox\" tabindex=\"0\" disabled=\"disabled\" title=\"" + checkBoxTitle + "\" />");
/** The Constant INPUT_CHECKED_DISABLED. */
SafeHtml INPUT_CHECKED_DISABLED = SafeHtmlUtils.fromSafeConstant(
"<input type=\"checkbox\" tabindex=\"0\" disabled=\"disabled\" title=\"" + checkBoxTitle + "\" checked/>");
Boolean viewData = getViewData(context.getKey());
if (viewData != null && viewData.equals(value)) {
clearViewData(context.getKey());
viewData = null;
}
if (!isUsed) {
if (value != null && ((viewData != null) ? viewData : value)) {
sb.append(INPUT_CHECKED);
} else {
sb.append(INPUT_UNCHECKED);
}
} else {
if (value != null && ((viewData != null) ? viewData : value)) {
sb.append(INPUT_CHECKED_DISABLED);
} else {
sb.append(INPUT_UNCHECKED_DISABLED);
}
}
}
  /**
   * Sets the title attribute used for checkboxes rendered after this call.
   *
   * @param title the new checkbox title
   */
  public void setTitle(String title) {
    this.checkBoxTitle = title;
  }
}
| |
/**
* Copyright 2015 Electric Cloud, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// WebSphereConfigListLoader.java --
//
// WebSphereConfigListLoader.java is part of ElectricCommander.
//
// Copyright (c) 2005-2011 Electric Cloud, Inc.
// All rights reserved.
//
package ecplugins.websphere.client;
import java.util.HashMap;
import java.util.Map;
import com.google.gwt.http.client.Request;
import com.google.gwt.http.client.RequestCallback;
import com.google.gwt.http.client.RequestException;
import com.google.gwt.http.client.Response;
import ecinternal.client.HasErrorPanel;
import ecinternal.client.Loader;
import com.electriccloud.commander.client.ChainedCallback;
import com.electriccloud.commander.gwt.client.Component;
import com.electriccloud.commander.client.domain.Property;
import com.electriccloud.commander.gwt.client.requests.CgiRequestProxy;
import com.electriccloud.commander.client.requests.GetPropertyRequest;
import com.electriccloud.commander.client.responses.CommanderError;
import com.electriccloud.commander.client.responses.PropertyCallback;
import com.electriccloud.commander.client.util.StringUtil;
import static ecinternal.client.InternalComponentBaseFactory.getPluginName;
/**
 * Loads the list of WebSphere configurations through the plugin's CGI
 * backend and, when an editor name has been set, loads the matching editor
 * definition via a Commander getProperty request. Calls the chained
 * callback's {@code onComplete} when everything has finished.
 */
public class WebSphereConfigListLoader
    extends Loader
{

    //~ Instance fields --------------------------------------------------------

    /** Receives the parsed configurations and editor definitions. */
    private final WebSphereConfigList m_configList;

    /** Proxy used to issue requests to the plugin's websphere.cgi script. */
    private final CgiRequestProxy m_cgiRequestProxy;

    /** Editor form to load after the configurations; null/empty skips it. */
    private String m_editorName;

    //~ Constructors -----------------------------------------------------------

    public WebSphereConfigListLoader(
            WebSphereConfigList configList,
            Component           component,
            ChainedCallback     callback)
    {
        this(configList, null, component, callback);
    }

    /**
     * @param implementedMethod currently unused — kept only for signature
     *                          compatibility with existing callers.
     */
    public WebSphereConfigListLoader(
            WebSphereConfigList configList,
            String              implementedMethod,
            Component           component,
            ChainedCallback     callback)
    {
        super(component, callback);
        m_configList      = configList;
        m_cgiRequestProxy = new CgiRequestProxy(getPluginName(),
                "websphere.cgi");
    }

    //~ Methods ----------------------------------------------------------------

    /** Kicks off loading by requesting the configuration list from the CGI. */
    @Override public void load()
    {
        Map<String, String> cgiParams = new HashMap<String, String>();

        cgiParams.put("cmd", "getCfgList");
        loadConfigs(cgiParams);
    }

    /**
     * Issues the CGI request and parses the response into the config list.
     * Errors are shown in the component's error panel when it has one,
     * otherwise logged.
     */
    private void loadConfigs(Map<String, String> cgiParams)
    {
        try {
            String request = m_cgiRequestProxy.issueGetRequest(cgiParams,
                    new RequestCallback() {
                        @Override public void onError(
                                Request   request,
                                Throwable exception)
                        {
                            // Fix: guard the cast — not every Component is a
                            // HasErrorPanel (matches the catch block below,
                            // which already checks).
                            if (m_component instanceof HasErrorPanel) {
                                ((HasErrorPanel) m_component).addErrorMessage(
                                    "Error loading WebSphere configuration list: ",
                                    exception);
                            }
                            else {
                                m_component.getLog()
                                           .error(exception);
                            }
                        }

                        @Override public void onResponseReceived(
                                Request  request,
                                Response response)
                        {
                            String responseString = response.getText();

                            // If HTML came back we never reached the CGI
                            // (e.g. a login or error page was served instead).
                            boolean isHtml = (responseString.indexOf(
                                        "DOCTYPE HTML") != -1);
                            String  error;

                            if (!isHtml) {
                                error = m_configList.parseResponse(
                                        responseString);
                            }
                            else {
                                error = responseString;
                            }

                            if (m_component.getLog()
                                           .isDebugEnabled()) {
                                m_component.getLog()
                                           .debug(
                                               "Recieved CGI response: "
                                                   + responseString
                                                   + " isHTML:" + isHtml
                                                   + " error:" + error);
                            }

                            if (error != null) {
                                // Fix: guard the cast, consistent with the
                                // rest of the class's error handling.
                                if (m_component instanceof HasErrorPanel) {
                                    ((HasErrorPanel) m_component)
                                        .addErrorMessage(error);
                                }
                                else {
                                    m_component.getLog()
                                               .error(error);
                                }
                            }
                            else {
                                if (StringUtil.isEmpty(m_editorName)
                                        || m_configList.isEmpty()) {
                                    // We're done!
                                    if (m_callback != null) {
                                        m_callback.onComplete();
                                    }
                                }
                                else {
                                    loadEditors();
                                }
                            }
                        }
                    });

            if (m_component.getLog()
                           .isDebugEnabled()) {
                m_component.getLog()
                           .debug("Issued CGI request: " + request);
            }
        }
        catch (RequestException e) {
            if (m_component instanceof HasErrorPanel) {
                ((HasErrorPanel) m_component).addErrorMessage(
                    "Error loading WebSphere configuration list: ", e);
            }
            else {
                m_component.getLog()
                           .error(e);
            }
        }
    }

    /**
     * Loads the editor definition property for {@link #m_editorName} and
     * completes the chained callback when the request finishes.
     */
    private void loadEditors()
    {
        GetPropertyRequest request =
            m_requestFactory.createGetPropertyRequest();

        request.setPropertyName("/plugins/EC-WebSphere/project/ui_forms/"
                + m_editorName);
        request.setExpand(false);
        request.setCallback(new EditorLoaderCallback("webspherecfg"));
        m_requestManager.doRequest(new ChainedCallback() {
                @Override public void onComplete()
                {
                    // We're done!
                    if (m_callback != null) {
                        m_callback.onComplete();
                    }
                }
            }, request);
    }

    /**
     * Sets the editor name to load after the configuration list; must be
     * called before {@link #load()} to take effect.
     */
    public void setEditorName(String editorName)
    {
        m_editorName = editorName;
    }

    //~ Inner Classes ----------------------------------------------------------

    /**
     * Callback for the editor-definition getProperty request: stores the
     * editor definition on the config list or reports an error.
     */
    public class EditorLoaderCallback
        implements PropertyCallback
    {

        //~ Instance fields ----------------------------------------------------

        /** Plugin key under which the editor definition is stored. */
        private final String m_configPlugin;

        //~ Constructors -------------------------------------------------------

        public EditorLoaderCallback(String configPlugin)
        {
            m_configPlugin = configPlugin;
        }

        //~ Methods ------------------------------------------------------------

        @Override public void handleError(CommanderError error)
        {
            if (m_component instanceof HasErrorPanel) {
                ((HasErrorPanel) m_component).addErrorMessage(error);
            }
            else {
                m_component.getLog()
                           .error(error);
            }
        }

        @Override public void handleResponse(Property response)
        {
            if (m_component.getLog()
                           .isDebugEnabled()) {
                m_component.getLog()
                           .debug("Commander getProperty request returned: "
                               + response);
            }

            if (response != null) {
                String value = response.getValue();

                if (!StringUtil.isEmpty(value)) {
                    m_configList.setEditorDefinition(m_configPlugin, value);

                    return;
                }
            }

            // There was no property value found in the response
            String errorMsg = "Editor '" + m_editorName
                    + "' not found for WebSphere plugin '" + m_configPlugin
                    + "'";

            if (m_component instanceof HasErrorPanel) {
                ((HasErrorPanel) m_component).addErrorMessage(errorMsg);
            }
            else {
                m_component.getLog()
                           .error(errorMsg);
            }
        }
    }
}
| |
/*
* @(#)BaseAttributeFactory.java
*
* Copyright 2004 Sun Microsystems, Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistribution of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistribution in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of Sun Microsystems, Inc. or the names of contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* This software is provided "AS IS," without a warranty of any kind. ALL
* EXPRESS OR IMPLIED CONDITIONS, REPRESENTATIONS AND WARRANTIES, INCLUDING
* ANY IMPLIED WARRANTY OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE
* OR NON-INFRINGEMENT, ARE HEREBY EXCLUDED. SUN MICROSYSTEMS, INC. ("SUN")
* AND ITS LICENSORS SHALL NOT BE LIABLE FOR ANY DAMAGES SUFFERED BY LICENSEE
* AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THIS SOFTWARE OR ITS
* DERIVATIVES. IN NO EVENT WILL SUN OR ITS LICENSORS BE LIABLE FOR ANY LOST
* REVENUE, PROFIT OR DATA, OR FOR DIRECT, INDIRECT, SPECIAL, CONSEQUENTIAL,
* INCIDENTAL OR PUNITIVE DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THE THEORY
* OF LIABILITY, ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE,
* EVEN IF SUN HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
*
* You acknowledge that this software is not designed or intended for use in
* the design, construction, operation or maintenance of any nuclear facility.
*/
package org.wso2.balana.attr;
import org.wso2.balana.ParsingException;
import org.wso2.balana.UnknownIdentifierException;
import java.net.URI;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.w3c.dom.Node;
/**
* This is a basic implementation of <code>AttributeFactory</code>. It implements the insertion and
* retrieval methods, but doesn't actually setup the factory with any datatypes.
* <p>
* Note that while this class is thread-safe on all creation methods, it is not safe to add support
* for a new datatype while creating an instance of a value. This follows from the assumption that
* most people will initialize these factories up-front, and then start processing without ever
* modifying the factories. If you need these mutual operations to be thread-safe, then you should
* write a wrapper class that implements the right synchronization.
*
* @since 1.2
* @author Seth Proctor
*/
public class BaseAttributeFactory extends AttributeFactory {

    // maps datatype identifiers (String) to the AttributeProxy that creates
    // values of that type; populated up-front, then read-only (see class doc)
    private final Map<String, AttributeProxy> attributeMap;

    /**
     * Default constructor.
     */
    public BaseAttributeFactory() {
        attributeMap = new HashMap<String, AttributeProxy>();
    }

    /**
     * Constructor that configures this factory with an initial set of supported datatypes.
     *
     * @param attributes a <code>Map</code> of <code>String</code>s to <code>AttributeProxy</code>s
     *
     * @throws IllegalArgumentException if any elements of the Map are not
     *             <code>AttributeProxy</code>s
     */
    public BaseAttributeFactory(Map attributes) {
        attributeMap = new HashMap<String, AttributeProxy>();
        // Iterate over entries (single lookup) instead of keySet + get.
        Iterator it = attributes.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry entry = (Map.Entry) it.next();
            try {
                String id = entry.getKey().toString();
                AttributeProxy proxy = (AttributeProxy) entry.getValue();
                attributeMap.put(id, proxy);
            } catch (ClassCastException cce) {
                throw new IllegalArgumentException("an element of the map "
                        + "was not an instance of " + "AttributeProxy");
            }
        }
    }

    /**
     * Adds a proxy to the factory, which in turn will allow new attribute types to be created using
     * the factory. Typically the proxy is provided as an anonymous class that simply calls the
     * getInstance methods (or something similar) of some <code>AttributeValue</code> class.
     *
     * @param id the name of the attribute type
     * @param proxy the proxy used to create new attributes of the given type
     *
     * @throws IllegalArgumentException if the datatype is already registered
     */
    public void addDatatype(String id, AttributeProxy proxy) {
        // make sure this doesn't already exist
        if (attributeMap.containsKey(id))
            throw new IllegalArgumentException("datatype already exists");

        attributeMap.put(id, proxy);
    }

    /**
     * Returns the datatype identifiers supported by this factory.
     *
     * @return a read-only <code>Set</code> of <code>String</code>s
     */
    public Set getSupportedDatatypes() {
        return Collections.unmodifiableSet(attributeMap.keySet());
    }

    /**
     * Creates a value based on the given DOM root node. The type of the attribute is assumed to be
     * present in the node as an XACML attribute named <code>DataType</code>, as is the case with
     * the AttributeValueType in the policy schema. The value is assumed to be the first child of
     * this node.
     *
     * @param root the DOM root of an attribute value
     *
     * @return a new <code>AttributeValue</code>
     *
     * @throws UnknownIdentifierException if the type in the node isn't known to the factory
     * @throws ParsingException if the node is invalid or can't be parsed by the appropriate proxy
     */
    public AttributeValue createValue(Node root) throws UnknownIdentifierException,
            ParsingException {
        Node node = root.getAttributes().getNamedItem("DataType");

        // Fix: report a parse error instead of a NullPointerException when
        // the mandatory DataType attribute is missing.
        if (node == null)
            throw new ParsingException("attribute value is missing the "
                    + "required DataType attribute");

        return createValue(root, node.getNodeValue());
    }

    /**
     * Creates a value based on the given DOM root node and data type.
     *
     * @param root the DOM root of an attribute value
     * @param dataType the type of the attribute
     *
     * @return a new <code>AttributeValue</code>
     *
     * @throws UnknownIdentifierException if the data type isn't known to the factory
     * @throws ParsingException if the node is invalid or can't be parsed by the appropriate proxy
     */
    public AttributeValue createValue(Node root, URI dataType) throws UnknownIdentifierException,
            ParsingException {
        return createValue(root, dataType.toString());
    }

    /**
     * Creates a value based on the given DOM root node and data type.
     *
     * @param root the DOM root of an attribute value
     * @param type the type of the attribute
     *
     * @return a new <code>AttributeValue</code>
     *
     * @throws UnknownIdentifierException if the type isn't known to the factory
     * @throws ParsingException if the node is invalid or can't be parsed by the appropriate proxy
     */
    public AttributeValue createValue(Node root, String type) throws UnknownIdentifierException,
            ParsingException {
        AttributeProxy proxy = attributeMap.get(type);

        if (proxy != null) {
            try {
                return proxy.getInstance(root);
            } catch (Exception e) {
                // Fix: preserve the underlying cause for diagnostics.
                throw new ParsingException("couldn't create " + type
                        + " attribute based on DOM node", e);
            }
        } else {
            throw new UnknownIdentifierException("Attributes of type " + type
                    + " aren't supported.");
        }
    }

    /**
     * Creates a value based on the given data type and text-encoded value. Used primarily by code
     * that does an XPath query to get an attribute value, and then needs to turn the resulting
     * value into an Attribute class.
     *
     * @param dataType the type of the attribute
     * @param value the text-encoded representation of an attribute's value
     * @param params additional parameters that need to creates a value
     * @return a new <code>AttributeValue</code>
     *
     * @throws UnknownIdentifierException if the data type isn't known to the factory
     * @throws ParsingException if the text is invalid or can't be parsed by the appropriate proxy
     */
    public AttributeValue createValue(URI dataType, String value, String[] params)
            throws UnknownIdentifierException, ParsingException {
        String type = dataType.toString();
        AttributeProxy proxy = attributeMap.get(type);

        if (proxy != null) {
            try {
                return proxy.getInstance(value, params);
            } catch (Exception e) {
                // Fix: preserve the underlying cause for diagnostics.
                throw new ParsingException("couldn't create " + type + " attribute from input: "
                        + value, e);
            }
        } else {
            throw new UnknownIdentifierException("Attributes of type " + type
                    + " aren't supported.");
        }
    }
}
| |
/*
* The contents of this file are subject to the terms
* of the Common Development and Distribution License
* (the "License"). You may not use this file except
* in compliance with the License.
*
* You can obtain a copy of the license at
* http://www.opensource.org/licenses/cddl1.php
* See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Cookie.java
*
* Created on March 12, 2007, 5:01 PM
*
*/
package javax.ws.rs.core;
import java.util.Objects;

import javax.ws.rs.ext.RuntimeDelegate;
import javax.ws.rs.ext.RuntimeDelegate.HeaderDelegate;

import org.jboss.resteasy.jaxrs_api.i18n.Messages;
/**
* Represents the value of a HTTP cookie, transferred in a request.
* RFC 2109 specifies the legal characters for name,
* value, path and domain. The default version of 1 corresponds to RFC 2109.
*
* @see <a href="http://www.ietf.org/rfc/rfc2109.txt">IETF RFC 2109</a>
*/
public class Cookie
{

   /**
    * Cookies using the default version correspond to RFC 2109.
    */
   public static final int DEFAULT_VERSION = 1;

   // Delegate that knows how to parse/serialize the HTTP header form.
   private static final HeaderDelegate<Cookie> delegate =
           RuntimeDelegate.getInstance().createHeaderDelegate(Cookie.class);

   // All fields are set once in the canonical constructor; instances are
   // immutable.
   private final String name;
   private final String value;
   private final int version;
   private final String path;
   private final String domain;

   /**
    * Create a new instance.
    *
    * @param name the name of the cookie
    * @param value the value of the cookie
    * @param path the URI path for which the cookie is valid
    * @param domain the host domain for which the cookie is valid
    * @param version the version of the specification to which the cookie complies
    * @throws IllegalArgumentException if name is null
    */
   public Cookie(String name, String value, String path, String domain, int version)
   {
      if (name == null)
         throw new IllegalArgumentException(Messages.MESSAGES.nameIsNull());
      this.name = name;
      this.value = value;
      this.version = version;
      this.domain = domain;
      this.path = path;
   }

   /**
    * Create a new instance with {@link #DEFAULT_VERSION}.
    *
    * @param name the name of the cookie
    * @param value the value of the cookie
    * @param path the URI path for which the cookie is valid
    * @param domain the host domain for which the cookie is valid
    * @throws IllegalArgumentException if name is null
    */
   public Cookie(String name, String value, String path, String domain)
   {
      this(name, value, path, domain, DEFAULT_VERSION);
   }

   /**
    * Create a new instance with no path or domain and {@link #DEFAULT_VERSION}.
    *
    * @param name the name of the cookie
    * @param value the value of the cookie
    * @throws IllegalArgumentException if name is null
    */
   public Cookie(String name, String value)
   {
      this(name, value, null, null);
   }

   /**
    * Creates a new instance of Cookie by parsing the supplied string.
    *
    * @param value the cookie string
    * @return the newly created Cookie
    * @throws IllegalArgumentException if the supplied string cannot be parsed
    *                                  or is null
    */
   public static Cookie valueOf(String value) throws IllegalArgumentException
   {
      return delegate.fromString(value);
   }

   /**
    * Get the name of the cookie
    *
    * @return the name
    */
   public String getName()
   {
      return name;
   }

   /**
    * Get the value of the cookie
    *
    * @return the value
    */
   public String getValue()
   {
      return value;
   }

   /**
    * Get the version of the cookie
    *
    * @return the version
    */
   public int getVersion()
   {
      return version;
   }

   /**
    * Get the domain of the cookie
    *
    * @return the domain
    */
   public String getDomain()
   {
      return domain;
   }

   /**
    * Get the path of the cookie
    *
    * @return the path
    */
   public String getPath()
   {
      return path;
   }

   /**
    * Convert the cookie to a string suitable for use as the value of the
    * corresponding HTTP header.
    *
    * @return a stringified cookie
    */
   @Override
   public String toString()
   {
      return delegate.toString(this);
   }

   /**
    * Generate a hashcode by hashing all of the cookies properties
    *
    * @return the hashcode
    */
   @Override
   public int hashCode()
   {
      // Same 7/97 scheme as the original implementation;
      // Objects.hashCode only replaces the manual null checks.
      int hash = 7;
      hash = 97 * hash + Objects.hashCode(this.name);
      hash = 97 * hash + Objects.hashCode(this.value);
      hash = 97 * hash + this.version;
      hash = 97 * hash + Objects.hashCode(this.path);
      hash = 97 * hash + Objects.hashCode(this.domain);
      return hash;
   }

   /**
    * Compare for equality
    *
    * @param obj the object to compare to
    * @return true if the object is a {@code Cookie} with the same value for
    *         all properties, false otherwise.
    */
   @Override
   public boolean equals(Object obj)
   {
      if (this == obj)
      {
         return true;
      }
      if (obj == null || getClass() != obj.getClass())
      {
         return false;
      }
      final Cookie other = (Cookie) obj;
      return Objects.equals(this.name, other.name)
            && Objects.equals(this.value, other.value)
            && this.version == other.version
            && Objects.equals(this.path, other.path)
            && Objects.equals(this.domain, other.domain);
   }
}
| |
/*
* Copyright 2012 Hai Bison
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package haibison.android.lockpattern.util;
import haibison.android.lockpattern.R;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Build;
import android.preference.PreferenceActivity;
import android.preference.PreferenceFragment;
import android.preference.PreferenceManager;
/**
* All settings for the library. They are stored in {@link SharedPreferences}.
* <p/>
* For some options, you can set them directly via tag {@code <meta-data>}
* inside tag {@code <activity>} in AndroidManifest.xml. Refer to setter methods
* for details. Note that the values in the manifest get higher priority than
* the ones from this class.
*
* @author Hai Bison
*
*/
public class AlpSettings {

    /**
     * This is singleton class.
     */
    private AlpSettings() {
    }// AlpSettings

    /**
     * Generates global preference filename of this library.
     *
     * @return the global preference filename.
     */
    public static final String genPreferenceFilename() {
        return String.format("%s_%s", Alp.LIB_NAME, Alp.UID);
    }// genPreferenceFilename()

    /**
     * Generates the global database filename for the given base name.
     *
     * @param name
     *            the base database name.
     * @return the global database filename.
     */
    public static final String genDatabaseFilename(String name) {
        return String.format("%s_%s_%s", Alp.LIB_NAME, Alp.UID, name);
    }// genDatabaseFilename()

    /**
     * Gets the library's {@link SharedPreferences}, opened in
     * multi-process mode under the global preference filename.
     *
     * @param context
     *            the context.
     * @return {@link SharedPreferences}
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public static SharedPreferences p(Context context) {
        /*
         * Always use application context.
         */
        // NOTE(review): MODE_MULTI_PROCESS is deprecated on newer Android
        // releases and is not reliable cross-process there — confirm the
        // supported API range.
        return context.getApplicationContext().getSharedPreferences(
                genPreferenceFilename(), Context.MODE_MULTI_PROCESS);
    }// p()

    /**
     * Setup {@code pm} to use global unique filename and global access mode.
     * You must use this method if you let the user change preferences via UI
     * (such as {@link PreferenceActivity}, {@link PreferenceFragment}...).
     *
     * @param context
     *            the context.
     * @param pm
     *            {@link PreferenceManager}.
     * @since v2.6 beta
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    public static void setupPreferenceManager(Context context,
            PreferenceManager pm) {
        pm.setSharedPreferencesMode(Context.MODE_MULTI_PROCESS);
        pm.setSharedPreferencesName(genPreferenceFilename());
    }// setupPreferenceManager()

    /**
     * Display preferences.
     *
     * @author Hai Bison
     *
     */
    public static class Display {

        /**
         * Name to use for tag {@code <meta-data>} in AndroidManifest.xml.
         *
         * @see #setStealthMode(Context, boolean)
         */
        public static final String METADATA_STEALTH_MODE = "stealthMode";

        /**
         * Name to use for tag {@code <meta-data>} in AndroidManifest.xml.
         *
         * @see #setMinWiredDots(Context, int)
         */
        public static final String METADATA_MIN_WIRED_DOTS = "minWiredDots";

        /**
         * Name to use for tag {@code <meta-data>} in AndroidManifest.xml.
         *
         * @see #setMaxRetries(Context, int)
         */
        public static final String METADATA_MAX_RETRIES = "maxRetries";

        /**
         * Name to use for tag {@code <meta-data>} in AndroidManifest.xml.
         *
         * @see #setCaptchaWiredDots(Context, int)
         */
        public static final String METADATA_CAPTCHA_WIRED_DOTS = "captchaWiredDots";

        /**
         * This is singleton class.
         */
        private Display() {
        }// Display

        /**
         * Checks if the library is using stealth mode or not.
         *
         * @param context
         *            the context.
         * @return {@code true} or {@code false}. Default is {@code false}.
         */
        public static boolean isStealthMode(Context context) {
            return p(context)
                    .getBoolean(
                            context.getString(R.string.alp_42447968_pkey_display_stealth_mode),
                            context.getResources()
                                    .getBoolean(
                                            R.bool.alp_42447968_pkey_display_stealth_mode_default));
        }// isStealthMode()

        /**
         * Sets stealth mode.
         * <p/>
         * You can set this value in AndroidManifest.xml with
         * {@link #METADATA_STEALTH_MODE}.
         *
         * @param context
         *            the context.
         * @param v
         *            the value.
         */
        public static void setStealthMode(Context context, boolean v) {
            // NOTE(review): commit() writes synchronously on the calling
            // thread (same for all setters below) — confirm they are not
            // called on the UI thread in hot paths.
            p(context)
                    .edit()
                    .putBoolean(
                            context.getString(R.string.alp_42447968_pkey_display_stealth_mode),
                            v).commit();
        }// setStealthMode()

        /**
         * Gets minimum wired dots allowed for a pattern.
         *
         * @param context
         *            the context.
         * @return the minimum wired dots allowed for a pattern. Default is
         *         {@code 4}.
         */
        public static int getMinWiredDots(Context context) {
            return p(context)
                    .getInt(context
                            .getString(R.string.alp_42447968_pkey_display_min_wired_dots),
                            context.getResources()
                                    .getInteger(
                                            R.integer.alp_42447968_pkey_display_min_wired_dots_default));
        }// getMinWiredDots()

        /**
         * Validates min wired dots: values outside 1..9 fall back to the
         * resource default.
         *
         * @param context
         *            the context.
         * @param v
         *            the input value.
         * @return the correct value.
         */
        public static int validateMinWiredDots(Context context, int v) {
            if (v <= 0 || v > 9)
                v = context
                        .getResources()
                        .getInteger(
                                R.integer.alp_42447968_pkey_display_min_wired_dots_default);
            return v;
        }// validateMinWiredDots()

        /**
         * Sets minimum wired dots allowed for a pattern.
         * <p/>
         * You can set this value in AndroidManifest.xml with
         * {@link #METADATA_MIN_WIRED_DOTS}.
         *
         * @param context
         *            the context.
         * @param v
         *            the minimum wired dots allowed for a pattern.
         */
        public static void setMinWiredDots(Context context, int v) {
            v = validateMinWiredDots(context, v);
            p(context)
                    .edit()
                    .putInt(context
                            .getString(R.string.alp_42447968_pkey_display_min_wired_dots),
                            v).commit();
        }// setMinWiredDots()

        /**
         * Gets max retries allowed in mode comparing pattern.
         *
         * @param context
         *            the context.
         * @return the max retries allowed in mode comparing pattern. Default is
         *         {@code 5}.
         */
        public static int getMaxRetries(Context context) {
            return p(context)
                    .getInt(context
                            .getString(R.string.alp_42447968_pkey_display_max_retries),
                            context.getResources()
                                    .getInteger(
                                            R.integer.alp_42447968_pkey_display_max_retries_default));
        }// getMaxRetries()

        /**
         * Validates max retries: non-positive values fall back to the
         * resource default.
         *
         * @param context
         *            the context.
         * @param v
         *            the input value.
         * @return the correct value.
         */
        public static int validateMaxRetries(Context context, int v) {
            if (v <= 0)
                v = context
                        .getResources()
                        .getInteger(
                                R.integer.alp_42447968_pkey_display_max_retries_default);
            return v;
        }// validateMaxRetries()

        /**
         * Sets max retries allowed in mode comparing pattern.
         * <p/>
         * You can set this value in AndroidManifest.xml with
         * {@link #METADATA_MAX_RETRIES}.
         *
         * @param context
         *            the context.
         * @param v
         *            the max retries allowed in mode comparing pattern.
         */
        public static void setMaxRetries(Context context, int v) {
            v = validateMaxRetries(context, v);
            p(context)
                    .edit()
                    .putInt(context
                            .getString(R.string.alp_42447968_pkey_display_max_retries),
                            v).commit();
        }// setMaxRetries()

        /**
         * Gets wired dots for a "CAPTCHA" pattern.
         *
         * @param context
         *            the context.
         * @return the wired dots for a "CAPTCHA" pattern. Default is {@code 4}.
         */
        public static int getCaptchaWiredDots(Context context) {
            return p(context)
                    .getInt(context
                            .getString(R.string.alp_42447968_pkey_display_captcha_wired_dots),
                            context.getResources()
                                    .getInteger(
                                            R.integer.alp_42447968_pkey_display_captcha_wired_dots_default));
        }// getCaptchaWiredDots()

        /**
         * Validates CAPTCHA wired dots: values outside 1..9 fall back to the
         * resource default.
         *
         * @param context
         *            the context.
         * @param v
         *            the input value.
         * @return the correct value.
         */
        public static int validateCaptchaWiredDots(Context context, int v) {
            if (v <= 0 || v > 9)
                v = context
                        .getResources()
                        .getInteger(
                                R.integer.alp_42447968_pkey_display_captcha_wired_dots_default);
            return v;
        }// validateCaptchaWiredDots()

        /**
         * Sets wired dots for a "CAPTCHA" pattern.
         * <p/>
         * You can set this value in AndroidManifest.xml with
         * {@link #METADATA_CAPTCHA_WIRED_DOTS}.
         *
         * @param context
         *            the context.
         * @param v
         *            the wired dots for a "CAPTCHA" pattern.
         */
        public static void setCaptchaWiredDots(Context context, int v) {
            v = validateCaptchaWiredDots(context, v);
            p(context)
                    .edit()
                    .putInt(context
                            .getString(R.string.alp_42447968_pkey_display_captcha_wired_dots),
                            v).commit();
        }// setCaptchaWiredDots()

    }// Display

    /**
     * Security preferences.
     *
     * @author Hai Bison
     *
     */
    public static class Security {

        /**
         * Name to use for tag {@code <meta-data>} in AndroidManifest.xml.
         *
         * @see #setEncrypterClass(Context, char[])
         * @see #setEncrypterClass(Context, Class)
         */
        public static final String METADATA_ENCRYPTER_CLASS = "encrypterClass";

        /**
         * Name to use for tag {@code <meta-data>} in AndroidManifest.xml.
         *
         * @see #setAutoSavePattern(Context, boolean)
         */
        public static final String METADATA_AUTO_SAVE_PATTERN = "autoSavePattern";

        /**
         * This is singleton class.
         */
        private Security() {
        }// Security

        /**
         * Checks if the library is using auto-save pattern mode.
         *
         * @param context
         *            the context.
         * @return {@code true} or {@code false}. Default is {@code false}.
         */
        public static boolean isAutoSavePattern(Context context) {
            return p(context)
                    .getBoolean(
                            context.getString(R.string.alp_42447968_pkey_sys_auto_save_pattern),
                            context.getResources()
                                    .getBoolean(
                                            R.bool.alp_42447968_pkey_sys_auto_save_pattern_default));
        }// isAutoSavePattern()

        /**
         * Sets auto-save pattern mode. Turning the mode off also clears any
         * stored pattern.
         * <p/>
         * You can set this value in AndroidManifest.xml with
         * {@link #METADATA_AUTO_SAVE_PATTERN}.
         *
         * @param context
         *            the context.
         * @param v
         *            the auto-save mode.
         */
        public static void setAutoSavePattern(Context context, boolean v) {
            p(context)
                    .edit()
                    .putBoolean(
                            context.getString(R.string.alp_42447968_pkey_sys_auto_save_pattern),
                            v).commit();
            if (!v)
                setPattern(context, null);
        }// setAutoSavePattern()

        /**
         * Gets the pattern.
         *
         * @param context
         *            the context.
         * @return the pattern. Default is {@code null}.
         */
        public static char[] getPattern(Context context) {
            String pattern = p(context).getString(
                    context.getString(R.string.alp_42447968_pkey_sys_pattern),
                    null);
            return pattern == null ? null : pattern.toCharArray();
        }// getPattern()

        /**
         * Sets the pattern.
         *
         * @param context
         *            the context.
         * @param pattern
         *            the pattern, can be {@code null} to reset it.
         */
        public static void setPattern(Context context, char[] pattern) {
            // A null pattern stores a null string, which SharedPreferences
            // treats as removing the entry.
            p(context)
                    .edit()
                    .putString(
                            context.getString(R.string.alp_42447968_pkey_sys_pattern),
                            pattern != null ? new String(pattern) : null)
                    .commit();
        }// setPattern()

        /**
         * Gets encrypter class.
         *
         * @param context
         *            the context.
         * @return the full name of encrypter class. Default is {@code null}.
         */
        public static char[] getEncrypterClass(Context context) {
            String clazz = p(context)
                    .getString(
                            context.getString(R.string.alp_42447968_pkey_sys_encrypter_class),
                            null);
            return clazz == null ? null : clazz.toCharArray();
        }// getEncrypterClass()

        /**
         * Sets encrypter class.
         * <p/>
         * You can set this value in AndroidManifest.xml with
         * {@link #METADATA_ENCRYPTER_CLASS}.
         *
         * @param context
         *            the context.
         * @param clazz
         *            the encrypter class, can be {@code null} if you don't want
         *            to use it.
         */
        public static void setEncrypterClass(Context context, Class<?> clazz) {
            setEncrypterClass(context, clazz != null ? clazz.getName()
                    .toCharArray() : null);
        }// setEncrypterClass()

        /**
         * Sets encrypter class.
         * <p/>
         * You can set this value in AndroidManifest.xml with
         * {@link #METADATA_ENCRYPTER_CLASS}.
         *
         * @param context
         *            the context.
         * @param clazz
         *            the full name of encrypter class, can be {@code null} if
         *            you don't want to use it.
         */
        public static void setEncrypterClass(Context context, char[] clazz) {
            p(context)
                    .edit()
                    .putString(
                            context.getString(R.string.alp_42447968_pkey_sys_encrypter_class),
                            clazz != null ? new String(clazz) : null).commit();
        }// setEncrypterClass()

    }// Security

}
| |
package com.cordys.coe.tools.useradmin.cordys;
import com.cordys.coe.tools.useradmin.cordys.XMLStoreCache.XSCache;
import com.cordys.coe.tools.useradmin.cordys.exception.CordysException;
import com.cordys.cpc.bsf.busobject.BSF;
import com.cordys.cpc.bsf.soap.SOAPRequestObject;
import com.eibus.xml.nom.Document;
import com.eibus.xml.nom.Node;
import com.eibus.xml.xpath.XPath;
import com.eibus.xml.xpath.XPathMetaInfo;
/**
 * Class to represent XMLStore with methods to read, update and delete an
 * XMLObject via the Cordys XMLStore web service ({@code GetXMLObject} /
 * {@code UpdateXMLObject}).
 *
 * @author kekema
 *
 */
public class XMLStore
{
    // Process-wide cache of XMLStore objects, keyed by XMLStore key.
    static XSCache xsCache = new XSCache();

    /**
     * Get an XMLObject from XMLStore (latest available version: user/org/isv).
     *
     * @param key the XMLStore key
     * @return XMLObject instance, or null when the key does not exist
     */
    public static XMLStoreObject getXMLObject(String key)
    {
        return (getXMLObject(key, false));
    }

    /**
     * Get an XMLObject via the cache (loads from XMLStore on a miss).
     *
     * @param key the XMLStore key
     * @return cached XMLObject instance
     */
    public static XMLStoreObject getXMLObjectByCache(String key)
    {
        return (xsCache.getXMLStoreObject(key));
    }

    /**
     * Get an XMLObject from XMLStore
     *
     * @param key the XMLStore key
     * @param isvVersion either load the isv version or the version as available (user/org/isv)
     *
     * @return XMLObject instance, or null when the key does not exist
     */
    public static XMLStoreObject getXMLObject(String key, boolean isvVersion)
    {
        int response = 0;
        int resultNode = 0;
        String level = null;
        String lastModified = null;
        String namespace = "http://schemas.cordys.com/1.0/xmlstore";
        String methodName = "GetXMLObject";
        String[] paramNames = new String[] { "key" };
        Object[] paramValues = new Object[] { key };
        SOAPRequestObject sro = null;
        int paramNode = 0;
        try
        {
            if (isvVersion)
            {
                // The version attribute can only be passed as an XML parameter.
                // NOTE(review): key is concatenated unescaped into XML; assumes
                // keys never contain XML-special characters -- confirm.
                sro = new SOAPRequestObject(namespace, methodName, null, null);
                Document nomDocument = BSF.getXMLDocument();
                paramNode = nomDocument.parseString("<key version=\"isv\">"+key+"</key>");
                sro.addParameterAsXml(paramNode);
            }
            else
            {
                sro = new SOAPRequestObject(namespace, methodName, paramNames, paramValues);
            }
            response = sro.execute();
            if (response != 0)
            {
                // get level/lastmodified from the response tuple
                int nNode = XPath.getFirstMatch("//tuple", new XPathMetaInfo(), response);
                if (nNode > 0)
                {
                    level = Node.getAttribute(nNode, "level");
                    lastModified = Node.getAttribute(nNode, "lastModified");
                }
                // get tuple/old; clone its content as the response node is
                // deleted in the finally block below
                nNode = XPath.getFirstMatch("//old", new XPathMetaInfo(), response);
                if (nNode > 0)
                {
                    nNode = Node.getFirstChildElement(nNode);
                    if (nNode > 0)
                    {
                        resultNode = Node.clone(nNode, true);
                    }
                }
            }
        }
        catch (Exception e)
        {
            throw new CordysException("Not able to read xml " + key + " from XMLStore.", e);
        }
        finally
        {
            // Free NOM memory for the SOAP response and the request parameter.
            if (response > 0)
            {
                Node.delete(response);
                response = 0;
            }
            if (paramNode > 0)
            {
                Node.delete(paramNode);
                paramNode = 0;
            }
        }
        // compose resulting XMLObject
        XMLStoreObject xmlObject = null;
        if (resultNode > 0)
        {
            xmlObject = new XMLStoreObject(resultNode);
            xmlObject.setXmlStoreLastModified(lastModified);
            xmlObject.setXmlStoreLevel(level);
        }
        return xmlObject;
    }

    /**
     * Update XMLStore Object
     *
     * @param key the XMLStore key
     * @param oldObject current version; its lastModified guards against concurrent updates
     * @param newObject new version to store
     */
    public static void updateXMLObject(String key, XMLStoreObject oldObject, XMLStoreObject newObject)
    {
        int response = 0;
        String namespace = "http://schemas.cordys.com/1.0/xmlstore";
        String methodName = "UpdateXMLObject";
        int tupleNode = 0;
        try
        {
            SOAPRequestObject sro = new SOAPRequestObject(namespace, methodName, null, null);
            Document nomDocument = BSF.getXMLDocument();
            // compose payload with tuple old/new
            String payload = "<tuple key=\""+key+"\" lastModified=\""+oldObject.getXmlStoreLastModified()+"\"><old>" + oldObject.toString() + "</old><new>" + newObject.toString() + "</new></tuple>";
            tupleNode = nomDocument.parseString(payload);
            Node.removeAttributesRecursive(tupleNode, "xmlns:SOAP", "xmlns", null, null);
            sro.addParameterAsXml(tupleNode);
            response = sro.execute();
        }
        catch (Exception e)
        {
            throw new CordysException("Not able to update xml " + key + " in XMLStore.", e);
        }
        finally
        {
            if (response > 0)
            {
                Node.delete(response);
                response = 0;
            }
            if (tupleNode > 0)
            {
                Node.delete(tupleNode);
                tupleNode = 0;
            }
            // Fix: invalidate any entry in the XMLStore cache, so that
            // getXMLObjectByCache() does not keep serving the pre-update
            // content (deleteXMLObject already does this; updates must too).
            xsCache.invalidate(key);
        }
    }

    /**
     * Delete XMLStore Object
     * <p>
     * Uses UpdateXMLObject with a tuple without a &lt;new&gt; element, which
     * is how the XMLStore service expresses deletion.
     *
     * @param key the XMLStore key
     * @param lastModified last-modified stamp for optimistic locking
     * @param level store level (user/org/isv)
     * @param original original flag as returned by the store
     * @param name object name
     */
    public static void deleteXMLObject(String key, String lastModified, String level, String original, String name)
    {
        int response = 0;
        String namespace = "http://schemas.cordys.com/1.0/xmlstore";
        String methodName = "UpdateXMLObject";
        int tupleNode = 0;
        try
        {
            SOAPRequestObject sro = new SOAPRequestObject(namespace, methodName, null, null);
            Document nomDocument = BSF.getXMLDocument();
            // compose payload with tuple
            String payload = "<tuple key=\"" + key + "\" lastModified=\"" + lastModified + "\" level=\"" + level + "\" original=\"" + original + "\" name=\"" + name +"\"/>";
            tupleNode = nomDocument.parseString(payload);
            Node.removeAttributesRecursive(tupleNode, "xmlns:SOAP", "xmlns", null, null);
            sro.addParameterAsXml(tupleNode);
            response = sro.execute();
        }
        catch (Exception e)
        {
            throw new CordysException("Not able to delete xml " + key + " in XMLStore.", e);
        }
        finally
        {
            if (response > 0)
            {
                Node.delete(response);
                response = 0;
            }
            if (tupleNode > 0)
            {
                Node.delete(tupleNode);
                tupleNode = 0;
            }
            // invalidate any entry in the XMLStore cache
            xsCache.invalidate(key);
        }
    }
}
| |
/*******************************************************************************
*
* Copyright FUJITSU LIMITED 2017
*
* Creation Date: Jul 10, 2012
*
*******************************************************************************/
package org.oscm.serviceprovisioningservice.bean;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.*;
import org.junit.Before;
import org.junit.Test;
import org.oscm.dataservice.local.DataService;
import org.oscm.domobjects.Organization;
import org.oscm.domobjects.OrganizationRole;
import org.oscm.domobjects.OrganizationToRole;
import org.oscm.domobjects.PlatformUser;
import org.oscm.domobjects.PriceModel;
import org.oscm.domobjects.Product;
import org.oscm.domobjects.RoleAssignment;
import org.oscm.domobjects.Subscription;
import org.oscm.domobjects.UserRole;
import org.oscm.domobjects.enums.LocalizedObjectTypes;
import org.oscm.i18nservice.local.LocalizerServiceLocal;
import org.oscm.internal.types.enumtypes.OrganizationRoleType;
import org.oscm.internal.types.enumtypes.PriceModelType;
import org.oscm.internal.types.enumtypes.UserRoleType;
import org.oscm.internal.types.exception.ObjectNotFoundException;
import org.oscm.internal.types.exception.OperationNotPermittedException;
import org.oscm.internal.vo.VOLocalizedText;
import org.oscm.internal.vo.VOPriceModel;
import org.oscm.internal.vo.VOPriceModelLocalization;
import org.oscm.internal.vo.VOService;
import org.oscm.internal.vo.VOServiceDetails;
import org.oscm.internal.vo.VOServiceLocalization;
import org.oscm.internal.vo.VOTechnicalService;
import org.oscm.serviceprovisioningservice.auditlog.PriceModelAuditLogCollector;
import org.oscm.serviceprovisioningservice.auditlog.ServiceAuditLogCollector;
import org.oscm.subscriptionservice.auditlog.SubscriptionAuditLogCollector;
public class ServiceProvisioningServiceBeanLocalizationTest {
private static long tkey = 0;
private ServiceProvisioningServiceBean sps;
private ServiceProvisioningServiceLocalizationBean spsLocalizer;
private LocalizerServiceLocal localizer;
private ServiceAuditLogCollector audit;
private PriceModelAuditLogCollector priceModelAudit;
private SubscriptionAuditLogCollector subscriptionAudit;
private DataService ds;
// Wires the bean under test to a spied localization bean backed by mocks;
// no container is started for these tests.
@Before
public void setup() throws Exception {
ds = mock(DataService.class);
audit = mock(ServiceAuditLogCollector.class);
priceModelAudit = mock(PriceModelAuditLogCollector.class);
subscriptionAudit = mock(SubscriptionAuditLogCollector.class);
sps = spy(new ServiceProvisioningServiceBean());
sps.dm = ds;
localizer = mock(LocalizerServiceLocal.class);
// Stubbed as a no-op so the save* tests only verify invocations.
doNothing().when(localizer).setLocalizedValues(anyLong(),
any(LocalizedObjectTypes.class),
anyListOf(VOLocalizedText.class));
sps.localizer = localizer;
spsLocalizer = spy(new ServiceProvisioningServiceLocalizationBean());
spsLocalizer.ds = ds;
spsLocalizer.localizer = localizer;
spsLocalizer.serviceAudit = audit;
spsLocalizer.priceModelAudit = priceModelAudit;
spsLocalizer.subscriptionAudit = subscriptionAudit;
sps.spsLocalizer = spsLocalizer;
}
// Reseller must not localize a service owned by another organization.
@Test
public void checkIsAllowedForLocalizingService_resellerOtherService()
throws Exception {
// given
Organization o = givenResellerOrganization();
givenCurrentUserForOrganization(o);
Organization otherOrganization = givenOtherOrganization();
otherOrganization.setKey(o.getKey() + 100);
Product givenProduct = givenProduct(otherOrganization);
// when
boolean result = spsLocalizer
.checkIsAllowedForLocalizingService(givenProduct.getKey());
// then
assertFalse(result);
}
// Reseller may localize a service it is the vendor of.
@Test
public void checkIsAllowedForLocalizingService_resellerOwnsService()
throws Exception {
// given
Organization o = givenResellerOrganization();
Product givenProduct = givenProduct(o);
givenCurrentUserForOrganization(o);
// when
boolean result = spsLocalizer
.checkIsAllowedForLocalizingService(givenProduct.getKey());
// then
assertTrue(result);
}
// Broker must not localize a service owned by another organization.
@Test
public void checkIsAllowedForLocalizingService_brokerOtherService()
throws Exception {
// given
Organization o = givenBrokerOrganization();
givenCurrentUserForOrganization(o);
Organization otherOrganization = givenOtherOrganization();
givenProduct(otherOrganization);
// when
boolean result = spsLocalizer.checkIsAllowedForLocalizingService(1L);
// then
assertFalse(result);
}
// Customer owning the subscription to the product may localize it.
@Test
public void checkIsAllowedForLocalizingService_customer_owningSubscription()
throws Exception {
// given
Organization customerOrganization = givenCustomerOrganization();
givenCurrentUserForOrganization(customerOrganization);
givenSubscriptionForProduct(givenProduct(givenSupplierOrganization()),
customerOrganization);
// when
boolean result = spsLocalizer.checkIsAllowedForLocalizingService(1L);
// then
assertTrue(result);
}
// Customer without the owning subscription is rejected.
@Test
public void checkIsAllowedForLocalizingService_customer_notOwningSubscription()
throws Exception {
// given
Organization customerOrganization = givenCustomerOrganization();
givenCurrentUserForOrganization(customerOrganization);
Organization supplierOrganization = givenSupplierOrganization();
givenSubscriptionForProduct(givenProduct(supplierOrganization),
supplierOrganization);
// when
boolean result = spsLocalizer.checkIsAllowedForLocalizingService(1L);
// then
assertFalse(result);
}
// Customer that is the product's target customer may localize it.
@Test
public void checkIsAllowedForLocalizingService_customer_targetCustomer()
throws Exception {
// given
Organization customerOrganization = givenCustomerOrganization();
givenCurrentUserForOrganization(customerOrganization);
Product givenProduct = givenProduct(givenSupplierOrganization());
givenProduct.setTargetCustomer(customerOrganization);
// when
boolean result = spsLocalizer.checkIsAllowedForLocalizingService(1L);
// then
assertTrue(result);
}
// Customer that is not the target customer is rejected.
@Test
public void checkIsAllowedForLocalizingService_customer_notTargetCustomer()
throws Exception {
// given
Organization supplierOrganization = givenSupplierOrganization();
Organization customerOrganization = givenCustomerOrganization();
givenCurrentUserForOrganization(customerOrganization);
givenProduct(supplierOrganization);
// when
boolean result = spsLocalizer.checkIsAllowedForLocalizingService(1L);
// then
assertFalse(result);
}
/** Creates a subscription owned by {@code organization} and attaches it to the product. */
private Subscription givenSubscriptionForProduct(Product givenProduct,
        Organization organization) {
    Subscription subscription = new Subscription();
    subscription.setKey(1L);
    subscription.setOrganization(organization);
    givenProduct.setOwningSubscription(subscription);
    return subscription;
}

/** A fresh organization with the BROKER role. */
private Organization givenBrokerOrganization() {
    return organizationWithRole(OrganizationRoleType.BROKER);
}

/** A fresh organization with the RESELLER role. */
private Organization givenResellerOrganization() {
    return organizationWithRole(OrganizationRoleType.RESELLER);
}

/** A fresh organization with the SUPPLIER role. */
private Organization givenSupplierOrganization() {
    return organizationWithRole(OrganizationRoleType.SUPPLIER);
}

/** A fresh organization with the CUSTOMER role. */
private Organization givenCustomerOrganization() {
    return organizationWithRole(OrganizationRoleType.CUSTOMER);
}

/** Common helper: a fresh organization granted exactly one role. */
private Organization organizationWithRole(OrganizationRoleType roleType) {
    Organization organization = givenOrganization();
    grantRole(organization, roleType);
    return organization;
}

/** A role-less organization with a distinct key (re-keyed via tkey). */
private Organization givenOtherOrganization() {
    Organization organization = givenOrganization();
    organization.setKey(tkey++);
    return organization;
}

/** Replaces the organization's granted roles with the single given role. */
private void grantRole(Organization o, OrganizationRoleType roleType) {
    OrganizationRole organizationRole = new OrganizationRole();
    organizationRole.setRoleName(roleType);
    OrganizationToRole link = new OrganizationToRole();
    link.setOrganization(o);
    link.setOrganizationRole(organizationRole);
    Set<OrganizationToRole> granted = new HashSet<OrganizationToRole>();
    granted.add(link);
    o.setGrantedRoles(granted);
}

/** A fresh organization with a unique key and an empty role set. */
private Organization givenOrganization() {
    Organization organization = new Organization();
    organization.setKey(tkey++);
    organization.setGrantedRoles(new HashSet<OrganizationToRole>());
    return organization;
}

/**
 * Creates a user in the given organization, mirrors the organization roles
 * as user roles, and stubs the data service to return it as current user.
 */
private PlatformUser givenCurrentUserForOrganization(
        Organization organization) {
    PlatformUser user = new PlatformUser();
    user.setKey(1L);
    user.setOrganization(organization);
    for (UserRoleType roleType : OrganizationRoleType
            .correspondingUserRoles(organization.getGrantedRoleTypes())) {
        RoleAssignment assignment = new RoleAssignment();
        assignment.setRole(new UserRole(roleType));
        user.getAssignedRoles().add(assignment);
    }
    when(ds.getCurrentUser()).thenReturn(user);
    return user;
}

/** Creates product key=1 vendored by {@code o} and stubs ds.getReference for it. */
private Product givenProduct(Organization o) throws ObjectNotFoundException {
    Product product = new Product();
    product.setKey(1L);
    product.setVendor(o);
    when(ds.getReference(Product.class, 1L)).thenReturn(product);
    return product;
}
// Reseller saving service localization: the localizer must never be invoked
// for descriptions, names or short descriptions.
@Test
public void saveServiceLocalization_serviceDescription_asReseller()
throws Exception {
// given
VOServiceLocalization sl = givenServiceLocalization("localizedDescription");
VOService service = givenVoService();
doReturn(Boolean.TRUE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
givenCurrentUserForOrganization(givenResellerOrganization());
doReturn(sl).when(spsLocalizer).getServiceLocalization(
any(Product.class));
// when
sps.saveServiceLocalization(service, sl);
// then
verify(localizer, never()).storeLocalizedResource(
sl.getDescriptions().get(0).getLocale(), 1L,
LocalizedObjectTypes.PRODUCT_MARKETING_DESC,
sl.getDescriptions().get(0).getText());
verify(localizer, never()).setLocalizedValues(1L,
LocalizedObjectTypes.PRODUCT_MARKETING_DESC,
Arrays.asList(sl.getDescriptions().get(0)));
verify(localizer, never()).setLocalizedValues(1L,
LocalizedObjectTypes.PRODUCT_MARKETING_NAME,
Arrays.asList(sl.getNames().get(0)));
verify(localizer, never()).setLocalizedValues(1L,
LocalizedObjectTypes.PRODUCT_SHORT_DESCRIPTION,
Arrays.asList(sl.getShortDescriptions().get(0)));
}
/** A value-object service with key 1 and version 0. */
private VOService givenVoService() {
    VOService voService = new VOService();
    voService.setKey(1L);
    voService.setVersion(0);
    return voService;
}

/**
 * Builds a service localization carrying the given description plus fixed
 * name, short-description and custom-tab-name texts (all locale "en").
 */
private VOServiceLocalization givenServiceLocalization(
        String localizedDescription) {
    VOServiceLocalization serviceLocalization = new VOServiceLocalization();
    serviceLocalization.setDescriptions(Arrays
            .asList(givenLocalizedText(localizedDescription)));
    serviceLocalization.setNames(Arrays.asList(givenLocalizedText("name")));
    serviceLocalization.setShortDescriptions(Arrays
            .asList(givenLocalizedText("shortDescription")));
    serviceLocalization.setCustomTabNames(Arrays
            .asList(givenLocalizedText("customTabName")));
    return serviceLocalization;
}

/** A localized text in locale "en", version 0, with the given content. */
private VOLocalizedText givenLocalizedText(String localizedDescription) {
    VOLocalizedText localizedText = new VOLocalizedText();
    localizedText.setLocale("en");
    localizedText.setText(localizedDescription);
    localizedText.setVersion(0);
    return localizedText;
}

/** A price-model localization with one description and one license text. */
private VOPriceModelLocalization givenPriceModelLocalization(
        String description, String license) {
    VOPriceModelLocalization priceModelLocalization = new VOPriceModelLocalization();
    priceModelLocalization.setDescriptions(Arrays
            .asList(givenLocalizedText(description)));
    priceModelLocalization.setLicenses(Arrays
            .asList(givenLocalizedText(license)));
    return priceModelLocalization;
}

/**
 * Builds a VO price model (key 1) of the requested chargeability and stubs
 * the data service with a matching domain PriceModel of the same type.
 */
private VOPriceModel givenPriceModel(boolean chargeable)
        throws ObjectNotFoundException {
    PriceModelType type = chargeable ? PriceModelType.PRO_RATA
            : PriceModelType.FREE_OF_CHARGE;
    VOPriceModel voPriceModel = new VOPriceModel();
    voPriceModel.setKey(1L);
    voPriceModel.setType(type);
    PriceModel domainPriceModel = new PriceModel();
    domainPriceModel.setKey(1L);
    domainPriceModel.setType(type);
    when(ds.getReference(PriceModel.class, voPriceModel.getKey()))
            .thenReturn(domainPriceModel);
    return voPriceModel;
}
// Supplier saving a chargeable price model: description and license are stored.
@Test
public void savePriceModelLocalization_asSupplier() throws Exception {
// given
givenCurrentUserForOrganization(givenSupplierOrganization());
VOPriceModel priceModel = givenPriceModel(true);
VOPriceModelLocalization localization = givenPriceModelLocalization(
"desc", "license");
doReturn(Boolean.TRUE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
ds.getReference(PriceModel.class, priceModel.getKey()).setProduct(
givenProduct(givenSupplierOrganization()));
doReturn(localization).when(spsLocalizer).getPriceModelLocalization(
anyLong());
// when
sps.savePriceModelLocalization(priceModel, localization);
// then
verify(localizer, times(1)).setLocalizedValues(priceModel.getKey(),
LocalizedObjectTypes.PRICEMODEL_DESCRIPTION,
localization.getDescriptions());
verify(localizer, times(1)).setLocalizedValues(priceModel.getKey(),
LocalizedObjectTypes.PRICEMODEL_LICENSE,
localization.getLicenses());
}
// Non-chargeable price model: description is removed, license is still stored.
@Test
public void savePriceModelLocalization_asSupplier_notChargeable()
throws Exception {
// given
givenCurrentUserForOrganization(givenSupplierOrganization());
// NOTE(review): this call looks redundant -- givenProduct(...) below
// re-stubs the same ds.getReference(Product.class, 1L) reference. Confirm.
givenProduct(givenSupplierOrganization());
VOPriceModel priceModel = givenPriceModel(false);
Product product = givenProduct(givenSupplierOrganization());
VOPriceModelLocalization localization = givenPriceModelLocalization(
"desc", "license");
doReturn(Boolean.TRUE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
ds.getReference(PriceModel.class, priceModel.getKey()).setProduct(
product);
doReturn(localization).when(spsLocalizer).getPriceModelLocalization(
anyLong());
// when
sps.savePriceModelLocalization(priceModel, localization);
// then
verify(localizer, times(1)).removeLocalizedValues(priceModel.getKey(),
LocalizedObjectTypes.PRICEMODEL_DESCRIPTION);
verify(localizer, times(1)).setLocalizedValues(priceModel.getKey(),
LocalizedObjectTypes.PRICEMODEL_LICENSE,
localization.getLicenses());
}
// Localized description must be removed for a non-chargeable price model.
@Test
public void savePriceModelLocalization_asSupplier_localizedDescriptionRemovedInNonChargeable()
throws Exception {
// given
givenCurrentUserForOrganization(givenSupplierOrganization());
VOPriceModel priceModel = givenPriceModel(false);
VOPriceModelLocalization localization = givenPriceModelLocalization(
"desc", "license");
doReturn(Boolean.TRUE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
ds.getReference(PriceModel.class, priceModel.getKey()).setProduct(
givenProduct(givenSupplierOrganization()));
doReturn(localization).when(spsLocalizer).getPriceModelLocalization(
anyLong());
// when
sps.savePriceModelLocalization(priceModel, localization);
// then
verify(localizer, times(1)).removeLocalizedValues(priceModel.getKey(),
LocalizedObjectTypes.PRICEMODEL_DESCRIPTION);
}
// Supplier saving service localization: all four localized value types stored.
@Test
public void saveServiceLocalization_asSupplier() throws Exception {
// given
givenCurrentUserForOrganization(givenSupplierOrganization());
VOServiceLocalization sl = givenServiceLocalization("localized");
VOService service = givenVoService();
doReturn(Boolean.TRUE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
doReturn(sl).when(spsLocalizer).getServiceLocalization(
any(Product.class));
// when
sps.saveServiceLocalization(service, sl);
// then
verify(localizer, times(1)).setLocalizedValues(1L,
LocalizedObjectTypes.PRODUCT_MARKETING_DESC,
Arrays.asList(sl.getDescriptions().get(0)));
verify(localizer, times(1)).setLocalizedValues(1L,
LocalizedObjectTypes.PRODUCT_MARKETING_NAME,
Arrays.asList(sl.getNames().get(0)));
verify(localizer, times(1)).setLocalizedValues(1L,
LocalizedObjectTypes.PRODUCT_SHORT_DESCRIPTION,
Arrays.asList(sl.getShortDescriptions().get(0)));
verify(localizer, times(1)).setLocalizedValues(1L,
LocalizedObjectTypes.PRODUCT_CUSTOM_TAB_NAME,
Arrays.asList(sl.getCustomTabNames().get(0)));
}
// Customer reading service localization: name/description/short description
// are fetched from the localizer.
@Test
public void getServiceLocalization_asCustomer() throws Exception {
// given
givenCurrentUserForOrganization(givenCustomerOrganization());
givenProduct(givenSupplierOrganization());
VOService service = givenVoService();
doReturn(Boolean.TRUE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
// when
sps.getServiceLocalization(service);
// then
verify(localizer, times(1)).getLocalizedValues(service.getKey(),
LocalizedObjectTypes.PRODUCT_MARKETING_NAME);
verify(localizer, times(1)).getLocalizedValues(service.getKey(),
LocalizedObjectTypes.PRODUCT_MARKETING_DESC);
verify(localizer, times(1)).getLocalizedValues(service.getKey(),
LocalizedObjectTypes.PRODUCT_SHORT_DESCRIPTION);
}
// Access denied by the permission check must surface as
// OperationNotPermittedException.
@Test(expected = OperationNotPermittedException.class)
public void getServiceLocalization_noProduct() throws Exception {
// given
givenProduct(givenSupplierOrganization());
doReturn(Boolean.FALSE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
// when
sps.getServiceLocalization(givenVoService());
}
// Supplier reading price-model localization: description and license fetched.
@Test
public void getPriceModelLocalization() throws Exception {
// given
givenCurrentUserForOrganization(givenSupplierOrganization());
VOPriceModel givenPriceModel = givenPriceModel(true);
doReturn(Boolean.TRUE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
Product p = givenProduct(givenSupplierOrganization());
ds.getReference(PriceModel.class, givenPriceModel.getKey()).setProduct(
p);
ds.getReference(Product.class, p.getKey()).setPriceModel(
ds.getReference(PriceModel.class, givenPriceModel.getKey()));
// when
sps.getPriceModelLocalization(givenPriceModel);
// then
verify(localizer, times(1)).getLocalizedValues(
givenPriceModel.getKey(),
LocalizedObjectTypes.PRICEMODEL_DESCRIPTION);
verify(localizer, times(1)).getLocalizedValues(
givenPriceModel.getKey(),
LocalizedObjectTypes.PRICEMODEL_LICENSE);
}
// Supplier/broker path reads both values against the price model key.
@Test
public void getPriceModelLocalization_asSupplierOrBroker() throws Exception {
// given
Product product = new Product();
PriceModel priceModel = new PriceModel();
priceModel.setKey(675L);
product.setKey(321L);
product.setTemplate(product);
product.setPriceModel(priceModel);
Organization vendor = new Organization();
product.setVendor(vendor);
when(ds.getReference(Product.class, 321L)).thenReturn(product);
// when
spsLocalizer.getPriceModelLocalizationForSupplierOrBroker(product,
vendor);
// then
verify(localizer, times(1)).getLocalizedValues(priceModel.getKey(),
LocalizedObjectTypes.PRICEMODEL_DESCRIPTION);
verify(localizer, times(1)).getLocalizedValues(priceModel.getKey(),
LocalizedObjectTypes.PRICEMODEL_LICENSE);
}
// Reseller path: description by price-model key, license by service key
// (RESELLER_PRICEMODEL_LICENSE).
@Test
public void getPriceModelLocalization_asReseller() throws Exception {
// given
Product product = new Product();
PriceModel priceModel = new PriceModel();
priceModel.setKey(887L);
product.setTemplate(product);
product.setPriceModel(priceModel);
long serviceKey = 989L;
// when
spsLocalizer.getPriceModelLocalizationForReseller(product, serviceKey);
// then
verify(localizer, times(1)).getLocalizedValues(priceModel.getKey(),
LocalizedObjectTypes.PRICEMODEL_DESCRIPTION);
verify(localizer, times(1)).getLocalizedValues(serviceKey,
LocalizedObjectTypes.RESELLER_PRICEMODEL_LICENSE);
}
// Permission check throwing ObjectNotFoundException propagates unchanged.
@Test(expected = ObjectNotFoundException.class)
public void getPriceModelLicenseTemplateLocalization_notFound()
throws Exception {
// given
doThrow(new ObjectNotFoundException()).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
// when
sps.getPriceModelLicenseTemplateLocalization(new VOServiceDetails());
}
// Permission check returning false yields OperationNotPermittedException.
@Test(expected = OperationNotPermittedException.class)
public void getPriceModelLicenseTemplateLocalization_notPermitted()
throws Exception {
// given
doReturn(Boolean.FALSE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
// when
sps.getPriceModelLicenseTemplateLocalization(new VOServiceDetails());
}
// License template is read against the technical service key.
@Test
public void getPriceModelLicenseTemplateLocalization() throws Exception {
// given
doReturn(Boolean.TRUE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
VOServiceDetails givenService = givenVoServiceDetails();
// when
sps.getPriceModelLicenseTemplateLocalization(givenService);
// then
verify(localizer).getLocalizedValues(
givenService.getTechnicalService().getKey(),
LocalizedObjectTypes.PRODUCT_LICENSE_DESC);
}
/** Service details (key 1) referencing a technical service with key 2. */
private VOServiceDetails givenVoServiceDetails() {
    VOTechnicalService technicalService = new VOTechnicalService();
    technicalService.setKey(2L);
    VOServiceDetails serviceDetails = new VOServiceDetails();
    serviceDetails.setKey(1L);
    serviceDetails.setTechnicalService(technicalService);
    return serviceDetails;
}
// User from an unrelated organization is rejected.
@Test(expected = OperationNotPermittedException.class)
public void getPriceModelLocalization_OperationNotPermitted()
throws Exception {
// given
PlatformUser u = new PlatformUser();
u.setOrganization(givenOtherOrganization());
doReturn(u).when(ds).getCurrentUser();
// when
sps.getPriceModelLocalization(new VOPriceModel());
}
// Reseller reading service localization gets the three localized value types.
@Test
public void getServiceLocalization_asReseller() throws Exception {
// given
Organization resellerOrganization = givenResellerOrganization();
givenProduct(resellerOrganization);
doReturn(Boolean.TRUE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
givenCurrentUserForOrganization(resellerOrganization);
VOService voService = givenVoService();
// when
sps.getServiceLocalization(voService);
// then
verify(localizer).getLocalizedValues(voService.getKey(),
LocalizedObjectTypes.PRODUCT_SHORT_DESCRIPTION);
verify(localizer).getLocalizedValues(voService.getKey(),
LocalizedObjectTypes.PRODUCT_MARKETING_NAME);
verify(localizer).getLocalizedValues(voService.getKey(),
LocalizedObjectTypes.PRODUCT_MARKETING_DESC);
}
// Reseller must not read price-model localization of a non-chargeable model.
@Test(expected = OperationNotPermittedException.class)
public void getPriceModelLocalizationAsReseller_OperationNotPermitted()
throws Exception {
// given
Organization resellerOrganization = givenResellerOrganization();
givenProduct(resellerOrganization);
doReturn(Boolean.TRUE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
givenCurrentUserForOrganization(resellerOrganization);
VOPriceModel priceModel = givenPriceModel(false);
// when
sps.getPriceModelLocalization(priceModel);
}
// Reseller save path rejected when the permission check fails.
@Test(expected = OperationNotPermittedException.class)
public void savePriceModelLocalizationForReseller() throws Exception {
// given
VOPriceModelLocalization localization = new VOPriceModelLocalization();
givenProduct(new Organization());
doReturn(Boolean.FALSE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
// when
spsLocalizer.savePriceModelLocalizationForReseller(1L, true,
localization);
}
// Supplier save path rejected when the price model has no product.
@Test(expected = OperationNotPermittedException.class)
public void savePriceModelLocalizationForSupplier_noProduct()
throws Exception {
// given
PriceModel model = new PriceModel();
model.setProduct(null);
VOPriceModelLocalization localization = new VOPriceModelLocalization();
when(ds.getReference(PriceModel.class, 22L)).thenReturn(model);
// when
spsLocalizer.savePriceModelLocalizationForSupplier(22L, true,
localization);
}
// Supplier save path rejected when the permission check fails.
@Test(expected = OperationNotPermittedException.class)
public void savePriceModelLocalizationForSupplier_notAllowed()
throws Exception {
// given
VOPriceModelLocalization localization = new VOPriceModelLocalization();
givenProduct(new Organization());
doReturn(Boolean.FALSE).when(spsLocalizer)
.checkIsAllowedForLocalizingService(anyLong());
PriceModel model = new PriceModel();
model.setProduct(new Product());
when(ds.getReference(PriceModel.class, 22L)).thenReturn(model);
// when
spsLocalizer.savePriceModelLocalizationForSupplier(22L, true,
localization);
}
// No pre-existing licenses: the new license is stored in the user's locale
// and the method reports a change (true).
@Test
public void saveLicenseInformationForPriceModel_oldLicensesEmpty() {
// given
VOPriceModel priceModel = new VOPriceModel();
priceModel.setLicense("license");
List<VOLocalizedText> oldLicenses = new ArrayList<VOLocalizedText>();
long productKey = 1L;
long priceModelKey = 1L;
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("en");
doReturn(oldLicenses).when(localizer).getLocalizedValues(
eq(productKey), eq(LocalizedObjectTypes.PRODUCT_LICENSE_DESC));
// when
boolean result = sps.saveLicenseInformationForPriceModel(productKey,
priceModelKey, priceModel, currentUser, true);
// then
assertEquals(Boolean.TRUE, Boolean.valueOf(result));
verify(localizer, times(1)).storeLocalizedResource(
eq(currentUser.getLocale()), eq(productKey),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE),
eq(priceModel.getLicense()));
}
// Same as above but the localizer yields an (immutable) empty list.
@Test
public void saveLicenseInformationForPriceModel_oldLicensesNull() {
// given
VOPriceModel priceModel = new VOPriceModel();
priceModel.setLicense("license");
long productKey = 1L;
long priceModelKey = 1L;
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("en");
doReturn(Collections.emptyList()).when(localizer).getLocalizedValues(eq(productKey),
eq(LocalizedObjectTypes.PRODUCT_LICENSE_DESC));
// when
boolean result = sps.saveLicenseInformationForPriceModel(productKey,
priceModelKey, priceModel, currentUser, true);
// then
assertEquals(Boolean.TRUE, Boolean.valueOf(result));
verify(localizer, times(1)).storeLocalizedResource(
eq(currentUser.getLocale()), eq(productKey),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE),
eq(priceModel.getLicense()));
}
// Null license and no old licenses: an empty string is stored and the
// method reports no change (false).
@Test
public void saveLicenseInformationForPriceModel_oldLicensesEmptyAndNewLicenseEmpty() {
// given
final String EMPTY = "";
VOPriceModel priceModel = new VOPriceModel();
priceModel.setLicense(null);
List<VOLocalizedText> oldLicenses = new ArrayList<VOLocalizedText>();
long productKey = 1L;
long priceModelKey = 1L;
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("en");
doReturn(oldLicenses).when(localizer).getLocalizedValues(
eq(productKey), eq(LocalizedObjectTypes.PRODUCT_LICENSE_DESC));
// when
boolean result = sps.saveLicenseInformationForPriceModel(productKey,
priceModelKey, priceModel, currentUser, false);
// then
assertEquals(Boolean.FALSE, Boolean.valueOf(result));
verify(localizer, times(1)).storeLocalizedResource(
eq(currentUser.getLocale()), eq(productKey),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE), eq(EMPTY));
}
// Existing licenses in other locales are copied over; the new license is
// stored in the current user's locale ("jp").
@Test
public void saveLicenseInformationForPriceModel_oldLicensesExists() {
// given
final String NEW_LICENSE_DESCRIPTION = "license";
final String OLD_LICENSE_DESCRIPTION = "description";
VOPriceModel priceModel = new VOPriceModel();
priceModel.setLicense(NEW_LICENSE_DESCRIPTION);
priceModel.setKey(0);
List<VOLocalizedText> oldLicenses = new ArrayList<VOLocalizedText>();
VOLocalizedText enText = createVOLocalizedText("en",
OLD_LICENSE_DESCRIPTION, 0);
oldLicenses.add(enText);
VOLocalizedText deText = createVOLocalizedText("de",
OLD_LICENSE_DESCRIPTION, 0);
oldLicenses.add(deText);
long productKey = 1L;
long priceModelKey = 1L;
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("jp");
doReturn(oldLicenses).when(localizer).getLocalizedValues(
eq(priceModel.getKey()),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE));
// when
boolean result = sps.saveLicenseInformationForPriceModel(productKey,
priceModelKey, priceModel, currentUser, false);
// then
assertEquals(Boolean.TRUE, Boolean.valueOf(result));
verify(localizer, times(1)).storeLocalizedResource(
eq(currentUser.getLocale()), eq(productKey),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE),
eq(NEW_LICENSE_DESCRIPTION));
verify(localizer, times(1)).storeLocalizedResource(eq("en"),
eq(productKey), eq(LocalizedObjectTypes.PRICEMODEL_LICENSE),
eq(OLD_LICENSE_DESCRIPTION));
verify(localizer, times(1)).storeLocalizedResource(eq("de"),
eq(productKey), eq(LocalizedObjectTypes.PRICEMODEL_LICENSE),
eq(OLD_LICENSE_DESCRIPTION));
}
/**
* @return
*/
private VOLocalizedText createVOLocalizedText(String locale, String text,
int version) {
VOLocalizedText enText = new VOLocalizedText();
enText.setLocale(locale);
enText.setText(text);
enText.setVersion(version);
return enText;
}
@Test
public void saveLicenseInformationForPriceModel_oldLicensesForUserLocaleExists() {
// given
final String NEW_LICENSE_DESCRIPTION = "license";
final String OLD_LICENSE_DESCRIPTION = "description";
VOPriceModel priceModel = new VOPriceModel();
priceModel.setLicense(NEW_LICENSE_DESCRIPTION);
priceModel.setKey(0);
List<VOLocalizedText> oldLicenses = new ArrayList<VOLocalizedText>();
VOLocalizedText jpText = createVOLocalizedText("jp",
OLD_LICENSE_DESCRIPTION, 0);
oldLicenses.add(jpText);
long productKey = 1L;
long priceModelKey = 1L;
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("jp");
doReturn(oldLicenses).when(localizer).getLocalizedValues(
eq(priceModel.getKey()),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE));
// when
boolean result = sps.saveLicenseInformationForPriceModel(productKey,
priceModelKey, priceModel, currentUser, false);
// then
assertEquals(Boolean.TRUE, Boolean.valueOf(result));
verify(localizer, times(1)).storeLocalizedResource(
eq(currentUser.getLocale()), eq(productKey),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE),
eq(NEW_LICENSE_DESCRIPTION));
}
@Test
public void saveLicenseInformationForPriceModel_oldLicensesForUserLocaleExistsSameNewLicense() {
// given
final String UNCHANGED_LICENSE_DESCRIPTION = "description";
VOPriceModel priceModel = new VOPriceModel();
priceModel.setLicense(UNCHANGED_LICENSE_DESCRIPTION);
priceModel.setKey(0);
List<VOLocalizedText> oldLicenses = new ArrayList<VOLocalizedText>();
VOLocalizedText jpText = createVOLocalizedText("jp",
UNCHANGED_LICENSE_DESCRIPTION, 0);
oldLicenses.add(jpText);
long productKey = 1L;
long priceModelKey = 1L;
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("jp");
doReturn(oldLicenses).when(localizer).getLocalizedValues(
eq(priceModel.getKey()),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE));
// when
boolean result = sps.saveLicenseInformationForPriceModel(productKey,
priceModelKey, priceModel, currentUser, false);
// then
assertEquals(Boolean.TRUE, Boolean.valueOf(result));
verify(localizer, times(1)).storeLocalizedResource(
eq(currentUser.getLocale()), eq(productKey),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE),
eq(UNCHANGED_LICENSE_DESCRIPTION));
}
@Test
public void saveLicenseInformationForPriceModel_oldLicensesForUserLocaleNotExistsNewLicenseEmpty() {
// given
final String EMPTY_LICENSE_DESCRIPTION = "";
final String OLD_LICENSE_DESCRIPTION = "description";
VOPriceModel priceModel = new VOPriceModel();
priceModel.setLicense(EMPTY_LICENSE_DESCRIPTION);
priceModel.setKey(0);
List<VOLocalizedText> oldLicenses = new ArrayList<VOLocalizedText>();
VOLocalizedText jpText = createVOLocalizedText("jp",
OLD_LICENSE_DESCRIPTION, 0);
oldLicenses.add(jpText);
long productKey = 1L;
long priceModelKey = 1L;
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("de");
doReturn(oldLicenses).when(localizer).getLocalizedValues(
eq(priceModel.getKey()),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE));
// when
boolean result = sps.saveLicenseInformationForPriceModel(productKey,
priceModelKey, priceModel, currentUser, false);
// then
assertEquals(Boolean.FALSE, Boolean.valueOf(result));
verify(localizer, times(1)).storeLocalizedResource(
eq(currentUser.getLocale()), eq(productKey),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE),
eq(EMPTY_LICENSE_DESCRIPTION));
verify(localizer, times(1)).storeLocalizedResource(eq("jp"),
eq(productKey), eq(LocalizedObjectTypes.PRICEMODEL_LICENSE),
eq(OLD_LICENSE_DESCRIPTION));
}
@Test
public void localizePriceModel_ChargeableAndDescriptionIsSame() {
// given
VOPriceModel priceModel = new VOPriceModel();
PriceModel priceModelToStore = new PriceModel();
long priceModelKey = 1L;
priceModel.setKey(priceModelKey);
priceModel.setType(PriceModelType.PER_UNIT);
priceModelToStore.setType(PriceModelType.PER_UNIT);
long priceModelToStoreKey = 2L;
priceModelToStore.setKey(priceModelToStoreKey);
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("en");
String currentDescription = "CURRENTDESCRIPTION";
priceModel.setDescription(currentDescription);
doReturn(currentDescription).when(localizer)
.getLocalizedTextFromDatabase(currentUser.getLocale(),
priceModelKey,
LocalizedObjectTypes.PRICEMODEL_DESCRIPTION);
// when
boolean result = sps.localizePriceModel(priceModel, currentUser,
priceModelToStore);
// then
assertEquals(Boolean.FALSE, Boolean.valueOf(result));
verify(localizer, never()).storeLocalizedResource(
eq(currentUser.getLocale()), eq(priceModelToStoreKey),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE),
eq(priceModel.getDescription()));
verify(localizer, never()).removeLocalizedValues(eq(priceModelKey),
eq(LocalizedObjectTypes.PRICEMODEL_DESCRIPTION));
}
@Test
public void localizePriceModel_ChargeableAndDescriptionIsNotSame() {
// given
VOPriceModel priceModel = new VOPriceModel();
PriceModel priceModelToStore = new PriceModel();
long priceModelKey = 1L;
priceModel.setKey(priceModelKey);
priceModel.setType(PriceModelType.PER_UNIT);
priceModelToStore.setType(PriceModelType.PER_UNIT);
long priceModelToStoreKey = 2L;
priceModelToStore.setKey(priceModelToStoreKey);
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("en");
String currentDescription = "CURRENTDESCRIPTION";
priceModel.setDescription("DESCRIPTION");
doReturn(currentDescription).when(localizer)
.getLocalizedTextFromDatabase(currentUser.getLocale(),
priceModelKey,
LocalizedObjectTypes.PRICEMODEL_DESCRIPTION);
// when
boolean result = sps.localizePriceModel(priceModel, currentUser,
priceModelToStore);
// then
assertEquals(Boolean.TRUE, Boolean.valueOf(result));
verify(localizer, times(1)).storeLocalizedResource(
eq(currentUser.getLocale()), eq(priceModelToStoreKey),
eq(LocalizedObjectTypes.PRICEMODEL_DESCRIPTION),
eq(priceModel.getDescription()));
verify(localizer, never()).removeLocalizedValues(eq(priceModelKey),
eq(LocalizedObjectTypes.PRICEMODEL_DESCRIPTION));
}
@Test
public void localizePriceModel_FreeOfChargeToChargeable() {
// given
VOPriceModel priceModel = new VOPriceModel();
PriceModel priceModelToStore = new PriceModel();
long priceModelKey = 1L;
priceModel.setKey(priceModelKey);
priceModel.setType(PriceModelType.PER_UNIT);
priceModelToStore.setType(PriceModelType.FREE_OF_CHARGE);
long priceModelToStoreKey = 2L;
priceModelToStore.setKey(priceModelToStoreKey);
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("en");
String currentDescription = "";
priceModel.setDescription("DESCRIPTION");
doReturn(currentDescription).when(localizer)
.getLocalizedTextFromDatabase(currentUser.getLocale(),
priceModelKey,
LocalizedObjectTypes.PRICEMODEL_DESCRIPTION);
// when
boolean result = sps.localizePriceModel(priceModel, currentUser,
priceModelToStore);
// then
assertEquals(Boolean.TRUE, Boolean.valueOf(result));
verify(localizer, times(1)).storeLocalizedResource(
eq(currentUser.getLocale()), eq(priceModelToStoreKey),
eq(LocalizedObjectTypes.PRICEMODEL_DESCRIPTION),
eq(priceModel.getDescription()));
verify(localizer, never()).removeLocalizedValues(eq(priceModelKey),
eq(LocalizedObjectTypes.PRICEMODEL_DESCRIPTION));
}
@Test
public void localizePriceModel_FreeOfCharge() {
// given
VOPriceModel priceModel = new VOPriceModel();
PriceModel priceModelToStore = new PriceModel();
long priceModelKey = 1L;
priceModel.setKey(priceModelKey);
priceModel.setType(PriceModelType.FREE_OF_CHARGE);
priceModelToStore.setType(PriceModelType.FREE_OF_CHARGE);
long priceModelToStoreKey = 2L;
priceModelToStore.setKey(priceModelToStoreKey);
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("en");
String currentDescription = "";
priceModel.setDescription("");
doReturn(currentDescription).when(localizer)
.getLocalizedTextFromDatabase(currentUser.getLocale(),
priceModelKey,
LocalizedObjectTypes.PRICEMODEL_DESCRIPTION);
// when
boolean result = sps.localizePriceModel(priceModel, currentUser,
priceModelToStore);
// then
assertEquals(Boolean.FALSE, Boolean.valueOf(result));
verify(localizer, never()).storeLocalizedResource(
eq(currentUser.getLocale()), eq(priceModelToStoreKey),
eq(LocalizedObjectTypes.PRICEMODEL_DESCRIPTION),
eq(priceModel.getDescription()));
verify(localizer, never()).removeLocalizedValues(eq(priceModelKey),
eq(LocalizedObjectTypes.PRICEMODEL_DESCRIPTION));
}
@Test
public void localizePriceModel_DescriptionIsNull() {
// given
VOPriceModel priceModel = new VOPriceModel();
PriceModel priceModelToStore = new PriceModel();
long priceModelKey = 1L;
priceModel.setKey(priceModelKey);
priceModel.setType(PriceModelType.FREE_OF_CHARGE);
priceModelToStore.setType(PriceModelType.FREE_OF_CHARGE);
long priceModelToStoreKey = 2L;
priceModelToStore.setKey(priceModelToStoreKey);
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("en");
String currentDescription = "";
priceModel.setDescription(null);
doReturn(currentDescription).when(localizer)
.getLocalizedTextFromDatabase(currentUser.getLocale(),
priceModelKey,
LocalizedObjectTypes.PRICEMODEL_DESCRIPTION);
// when
boolean result = sps.localizePriceModel(priceModel, currentUser,
priceModelToStore);
// then
assertEquals(Boolean.FALSE, Boolean.valueOf(result));
verify(localizer, never()).storeLocalizedResource(
eq(currentUser.getLocale()), eq(priceModelToStoreKey),
eq(LocalizedObjectTypes.PRICEMODEL_DESCRIPTION),
eq(priceModel.getDescription()));
verify(localizer, never()).removeLocalizedValues(eq(priceModelKey),
eq(LocalizedObjectTypes.PRICEMODEL_DESCRIPTION));
}
@Test
public void savePriceModelLocalizationAsReseller_bug11573() {
// given
VOPriceModelLocalization localization = new VOPriceModelLocalization();
List<VOLocalizedText> licenses = new ArrayList<VOLocalizedText>();
VOLocalizedText text = new VOLocalizedText();
text.setText(null);
text.setLocale("en");
licenses.add(text);
localization.setLicenses(licenses);
// when
spsLocalizer.savePriceModelLocalizationAsReseller(1L, localization);
// then
verify(localizer, times(1)).storeLocalizedResource(eq("en"), eq(1L),
eq(LocalizedObjectTypes.RESELLER_PRICEMODEL_LICENSE), eq(""));
}
@Test
public void saveLicenseInformationForPriceModelOldLicensesAndNewOne() {
// given
VOPriceModel priceModel = new VOPriceModel();
priceModel.setLicense("license");
List<VOLocalizedText> oldLicenses = Arrays.asList(new VOLocalizedText[]{givenLocalizedText("engText")});
long productKey = 1L;
long priceModelKey = 1L;
PlatformUser currentUser = new PlatformUser();
currentUser.setLocale("de");
doReturn(oldLicenses).when(localizer).getLocalizedValues(
eq(productKey), eq(LocalizedObjectTypes.PRODUCT_LICENSE_DESC));
// when
boolean result = sps.saveLicenseInformationForPriceModel(productKey,
priceModelKey, priceModel, currentUser, true);
// then
assertEquals(Boolean.TRUE, Boolean.valueOf(result));
verify(localizer, times(1)).storeLocalizedResource(
eq(currentUser.getLocale()), eq(productKey),
eq(LocalizedObjectTypes.PRICEMODEL_LICENSE),
eq(priceModel.getLicense()));
}
}
| |
/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service;
import com.thoughtworks.go.config.CaseInsensitiveString;
import com.thoughtworks.go.config.CruiseConfig;
import com.thoughtworks.go.config.GoConfigDao;
import com.thoughtworks.go.config.materials.PackageMaterial;
import com.thoughtworks.go.config.materials.PackageMaterialConfig;
import com.thoughtworks.go.config.materials.PluggableSCMMaterial;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterial;
import com.thoughtworks.go.config.materials.git.GitMaterial;
import com.thoughtworks.go.config.materials.mercurial.HgMaterial;
import com.thoughtworks.go.config.materials.svn.SvnMaterial;
import com.thoughtworks.go.domain.MaterialRevision;
import com.thoughtworks.go.domain.MaterialRevisions;
import com.thoughtworks.go.domain.Pipeline;
import com.thoughtworks.go.domain.buildcause.BuildCause;
import com.thoughtworks.go.helper.MaterialsMother;
import com.thoughtworks.go.server.cache.GoCache;
import com.thoughtworks.go.server.dao.DatabaseAccessHelper;
import com.thoughtworks.go.server.domain.PipelineTimeline;
import com.thoughtworks.go.server.materials.DependencyMaterialUpdateNotifier;
import com.thoughtworks.go.server.materials.MaterialChecker;
import com.thoughtworks.go.server.persistence.MaterialRepository;
import com.thoughtworks.go.server.transaction.TransactionTemplate;
import com.thoughtworks.go.util.GoConfigFileHelper;
import com.thoughtworks.go.util.SystemEnvironment;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static com.thoughtworks.go.util.SystemEnvironment.RESOLVE_FANIN_MAX_BACK_TRACK_LIMIT;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:/applicationContext-global.xml",
"classpath:/applicationContext-dataLocalAccess.xml",
"classpath:/testPropertyConfigurer.xml",
"classpath:/spring-all-servlet.xml",
})
public class FaninDependencyResolutionTest {
    // Spring-wired collaborators supplied by the application contexts listed
    // in @ContextConfiguration above.
    @Autowired
    private DatabaseAccessHelper dbHelper;
    @Autowired
    private GoCache goCache;
    @Autowired
    private GoConfigDao goConfigDao;
    @Autowired
    private PipelineService pipelineService;
    @Autowired
    private MaterialRepository materialRepository;
    @Autowired
    private TransactionTemplate transactionTemplate;
    @Autowired
    private GoConfigService goConfigService;
    @Autowired
    private SystemEnvironment systemEnvironment;
    @Autowired
    private MaterialChecker materialChecker;
    @Autowired
    private PipelineTimeline pipelineTimeline;
    @Autowired
    private DependencyMaterialUpdateNotifier notifier;
    // Test fixtures: config-file helper, and the scheduling utility "u"
    // (initialised in setUp()) used to create pipelines/runs in the tests.
    private GoConfigFileHelper configHelper = new GoConfigFileHelper();
    private ScheduleTestUtil u;
    @Before
    public void setUp() throws Exception {
        // Fresh cache and config/db fixtures for every test; dependency
        // material updates are disabled so the tests control pipeline
        // scheduling explicitly through ScheduleTestUtil.
        goCache.clear();
        configHelper.usingCruiseConfigDao(goConfigDao);
        configHelper.onSetUp();
        dbHelper.onSetUp();
        u = new ScheduleTestUtil(transactionTemplate, materialRepository, dbHelper, configHelper);
        notifier.disableUpdates();
    }
    @After
    public void teardown() throws Exception {
        // Re-enable notifier updates and restore the back-track limit that
        // individual tests may have overridden via setMaxBackTrackLimit().
        notifier.enableUpdates();
        systemEnvironment.reset(RESOLVE_FANIN_MAX_BACK_TRACK_LIMIT);
        dbHelper.onTearDown();
        configHelper.onTearDown();
    }
    /** Overrides the fan-in resolution back-track limit for the current test. */
    private void setMaxBackTrackLimit(int limit) {
        systemEnvironment.set(RESOLVE_FANIN_MAX_BACK_TRACK_LIMIT, limit);
    }
private Integer maxBackTrackLimit() {
return systemEnvironment.get(SystemEnvironment.RESOLVE_FANIN_MAX_BACK_TRACK_LIMIT);
}
    @Test
    public void shouldRestoreMaterialNamesBasedOnMaterialConfig() throws Exception {
        // Pipeline "up" is consumed under different dependency-material names
        // by "mid" ("up-for-mid") and "down" ("up-for-down"); resolution for
        // "down" must return revisions carrying the name configured on "down".
        /*
            g -> up -> down
                  +-> mid -+
         */
        GitMaterial git = u.wf(new GitMaterial("git"), "folder1");
        u.checkinInOrder(git, "g1");
        ScheduleTestUtil.AddedPipeline up = u.saveConfigWith("up", u.m(git));
        ScheduleTestUtil.MaterialDeclaration upForMid = u.m(up);
        ((DependencyMaterial) upForMid.material).setName(new CaseInsensitiveString("up-for-mid"));
        ScheduleTestUtil.AddedPipeline mid = u.saveConfigWith("mid", upForMid);
        ScheduleTestUtil.MaterialDeclaration upForDown = u.m(up);
        ((DependencyMaterial) upForDown.material).setName(new CaseInsensitiveString("up-for-down"));
        ScheduleTestUtil.AddedPipeline down = u.saveConfigWith("down", u.m(mid), upForDown);
        CruiseConfig cruiseConfig = goConfigDao.load();
        String up_1 = u.runAndPass(up, "g1");
        String mid_1 = u.runAndPass(mid, up_1);
        String down_1 = u.runAndPass(down, mid_1, up_1);
        MaterialRevisions given = u.mrs(
                u.mr(mid, false, mid_1),
                u.mr(up, false, up_1));
        MaterialRevisions revisionsBasedOnDependencies = getRevisionsBasedOnDependencies(down, cruiseConfig, given);
        // Every revision of the "up" dependency must carry the name that
        // "down" configured for it.
        for (MaterialRevision revisionsBasedOnDependency : revisionsBasedOnDependencies) {
            DependencyMaterial dependencyPipeline = (DependencyMaterial) revisionsBasedOnDependency.getMaterial();
            if (dependencyPipeline.getPipelineName().equals(new CaseInsensitiveString("up"))) {
                assertThat(dependencyPipeline.getName(), is(new CaseInsensitiveString("up-for-down")));
            }
        }
        assertThat(revisionsBasedOnDependencies, is(given));
    }
@Test
public void shouldTriggerCommonCIAndCDPhasesCorrectly_FAILING_SCN() throws Exception {
// -----> Acceptance ----
// | | v
// Build | Staging ---> Production
// | v ^
// ----->Regression ----
//
GitMaterial git = u.wf(new GitMaterial("git"), "folder1");
u.checkinInOrder(git, "g1", "g2", "g3");
ScheduleTestUtil.AddedPipeline build = u.saveConfigWith("build", u.m(git));
ScheduleTestUtil.AddedPipeline acceptance = u.saveConfigWith("acceptance", u.m(build));
ScheduleTestUtil.AddedPipeline regression = u.saveConfigWith("regression", u.m(build), u.m(acceptance));
ScheduleTestUtil.AddedPipeline staging = u.saveConfigWith("staging", u.m(acceptance), u.m(regression));
ScheduleTestUtil.AddedPipeline production = u.saveConfigWith("production", u.m(staging));
CruiseConfig cruiseConfig = goConfigDao.load();
int i = 1;
String b_1 = u.runAndPass(build, "g1");
String a_1 = u.runAndPass(acceptance, b_1);
String r_1 = u.runAndPass(regression, b_1, a_1);
String s_1 = u.runAndPass(staging, a_1, r_1);
String p_1 = u.runAndPass(production, s_1);
String b_2 = u.runAndPass(build, "g2");
MaterialRevisions given = u.mrs(
u.mr(build, true, b_2),
u.mr(acceptance, false, a_1));
MaterialRevisions expected = u.mrs(
u.mr(build, true, b_1),
u.mr(acceptance, false, a_1));
assertThat(getRevisionsBasedOnDependencies(regression, cruiseConfig, given), is(expected));
String a_2 = u.runAndPass(acceptance, b_2);
given = u.mrs(
u.mr(build, true, b_2),
u.mr(acceptance, false, a_2));
expected = u.mrs(
u.mr(build, true, b_2),
u.mr(acceptance, true, a_2));
assertThat(getRevisionsBasedOnDependencies(regression, cruiseConfig, given), is(expected));
String r_2 = u.runAndPass(regression, b_2, a_2);
String r_3 = u.runAndPass(regression, b_1, a_2);
given = u.mrs(
u.mr(acceptance, true, a_2),
u.mr(regression, true, r_3));
expected = u.mrs(
u.mr(acceptance, true, a_2),
u.mr(regression, true, r_2));
assertThat(getRevisionsBasedOnDependencies(staging, cruiseConfig, given), is(expected));
String s_2 = u.runAndPass(staging, a_2, r_2);
String s_3 = u.runAndPass(staging, a_2, r_3);
given = u.mrs(u.mr(staging, true, s_3));
expected = u.mrs(u.mr(staging, true, s_2));
assertThat(getRevisionsBasedOnDependencies(production, cruiseConfig, given), is(expected));
String b_3 = u.runAndPass(build, "g3");
String a_3 = u.runAndPass(acceptance, b_3);
String r_4 = u.runAndPass(regression, b_3, a_3);
String s_4 = u.runAndPass(staging, a_3, r_1);
String s_5 = u.runAndPass(staging, a_1, r_4);
given = u.mrs(
u.mr(acceptance, true, a_3),
u.mr(regression, true, r_4));
expected = u.mrs(
u.mr(acceptance, true, a_3),
u.mr(regression, true, r_4));
MaterialRevisions previousMaterialRevisions = u.mrs(
u.mr(acceptance, false, a_1),
u.mr(regression, false, r_4));
assertThat(getRevisionsBasedOnDependencies(staging, cruiseConfig, given), is(expected));
// assertThat(getBuildCause(staging,given,previousMaterialRevisions), is(not(nullValue()))); //TODO: *************** Bug where pipeline should be triggered <Sara>
String r_5 = u.runAndPass(regression, b_3, a_3);
given = u.mrs(
u.mr(acceptance, true, a_3),
u.mr(regression, true, r_5));
expected = u.mrs(
u.mr(acceptance, true, a_3),
u.mr(regression, true, r_5));
assertThat(getRevisionsBasedOnDependencies(staging, cruiseConfig, given), is(expected));
previousMaterialRevisions = u.mrs(
u.mr(acceptance, false, a_1),
u.mr(regression, false, r_4));
assertThat(getBuildCause(staging, given, previousMaterialRevisions).getMaterialRevisions(), is(expected));
}
    @Test
    public void shouldReturnPreviousBuild_sRevisionsIfOneParentFailed() {
        // When one upstream (P2) has no newer passed build, fan-in should
        // fall back to the last mutually compatible revision set (g1 era).
        // git ----------
        // |            |
        // ----- ------ |
        // |       |    |
        // v       v    |
        // P1     P2    |
        // |       |    |
        // |       |    |
        // --->P3<--    |
        //      ^_______|
        GitMaterial gitMaterial = new GitMaterial("git-url");
        u.checkinInOrder(gitMaterial, "g1", "g2");
        ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("P1", u.m(gitMaterial));
        ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("P2", u.m(gitMaterial));
        ScheduleTestUtil.AddedPipeline p3 = u.saveConfigWith("P3", u.m(p1), u.m(p2), u.m(gitMaterial));
        String p1_1 = u.runAndPass(p1, "g1");
        String p2_1 = u.runAndPass(p2, "g1");
        String p3_1 = u.runAndPass(p3, p1_1, p2_1, "g1");
        String p1_2 = u.runAndPass(p1, "g2");
        String p2_2 = u.runAndFail(p2, "g2"); // P2 fails on g2 — forces the fallback
        MaterialRevisions given = u.mrs(new MaterialRevision[]{
                u.mr(gitMaterial, true, "g2"),
                u.mr(p1, true, p1_2),
                u.mr(p2, true, p2_1)});
        MaterialRevisions expected = u.mrs(new MaterialRevision[]{
                u.mr(gitMaterial, true, "g1"),
                u.mr(p1, true, p1_1),
                u.mr(p2, true, p2_1)});
        MaterialRevisions finalRevisions = getRevisionsBasedOnDependencies(p3, goConfigDao.load(), given);
        assertThat(finalRevisions, is(expected));
    }
    @Test
    public void shouldResolveWithMultipleDiamondsOnSamePipelines() throws Exception {
        // Both git and hg feed p1 and p2 (two diamonds); p3's resolution must
        // pick p1/p2 runs that agree on both materials (here p1_2 with p2_1,
        // which both used h11/g13... p1_2 and the compatible p2 run).
        /*
            |-------v
            |  /--P1--\
           git hg      P3
            |  \--P2--/
            +------^
         */
        HgMaterial hg = u.wf(new HgMaterial("hg", null), "folder1");
        String[] hg_revs = {"h11", "h12"};
        u.checkinInOrder(hg, hg_revs);
        GitMaterial git = u.wf(new GitMaterial("git"), "folder2");
        String[] git_revs = {"g11", "g12", "g13"};
        u.checkinInOrder(git, git_revs);
        ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", u.m(hg), u.m(git));
        ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("p2", u.m(hg), u.m(git));
        ScheduleTestUtil.AddedPipeline p3 = u.saveConfigWith("p3", u.m(p1), u.m(p2));
        String p1_1 = u.runAndPass(p1, "h12", "g11");
        String p2_1 = u.runAndPass(p2, "h11", "g13");
        String p1_2 = u.runAndPass(p1, "h11", "g13");
        String p2_2 = u.runAndPass(p2, "h12", "g13");
        MaterialRevisions given = u.mrs(new MaterialRevision[]{
                u.mr(p1, true, p1_2),
                u.mr(p2, true, p2_2)});
        MaterialRevisions expected = u.mrs(new MaterialRevision[]{
                u.mr(p1, true, p1_2),
                u.mr(p2, true, p2_1)});
        assertThat(getRevisionsBasedOnDependencies(p3, goConfigDao.load(), given), is(expected));
    }
    @Test
    public void shouldFindCompatibleRevisionWhenDependencyMaterialHasMaterialName() throws Exception {
        // Named dependency materials ("two", "three", "two_2") must not stop
        // fan-in from recognising "second" as a shared upstream of "last".
        // Third <- Second
        //  |      /
        //  |     /
        //  Last
        //
        SvnMaterial svn = u.wf(new SvnMaterial("svn", "username", "password", false), "folder1");
        String[] svn_revs = {"s1"};
        u.checkinInOrder(svn, svn_revs);
        ScheduleTestUtil.AddedPipeline second = u.saveConfigWith("second", u.m(svn));
        ScheduleTestUtil.AddedPipeline third = u.saveConfigWith("third", u.m(second, "two"));
        ScheduleTestUtil.AddedPipeline last = u.saveConfigWith("last", u.m(third, "three"), u.m(second, "two_2"));
        String second_1 = u.runAndPass(second, "s1");
        String third_1 = u.runAndPass(third, second_1);
        MaterialRevisions given = u.mrs(
                u.mr(third, true, third_1),
                u.mr(second, true, second_1));
        assertThat(getRevisionsBasedOnDependencies(last, goConfigDao.load(), given), is(given));
    }
    @Test
    public void shouldFindCompatibleRevisionWhenSameMaterialHasDiffFolderNamesInGraph() throws Exception {
        // The same svn repository is checked out into different destination
        // folders ("one" vs "two") by different pipelines; fan-in must still
        // treat them as the same material.
        // Second <- Svn
        //  |      /
        //  |     /
        //  Third
        //
        SvnMaterial svn1 = u.wf(new SvnMaterial("svn", "username", "password", false), "one");
        SvnMaterial svn2 = u.wf(new SvnMaterial("svn", "username", "password", false), "two");
        String[] svn_revs = {"s1"};
        u.checkinInOrder(svn1, svn_revs);
        ScheduleTestUtil.AddedPipeline second = u.saveConfigWith("second", u.m(svn1));
        ScheduleTestUtil.AddedPipeline third = u.saveConfigWith("third", u.m(second), u.m(svn2));
        String second_1 = u.runAndPass(second, "s1");
        MaterialRevisions given = u.mrs(
                u.mr(second, true, second_1),
                u.mr(svn2, true, "s1")
        );
        MaterialRevisions materialRevisions = getRevisionsBasedOnDependencies(third, goConfigDao.load(), given);
        assertThat(materialRevisions, is(given));
    }
    @Test
    public void shouldComputeRevisionCorrectlyWhenUpstreamPipelineHasModifications_ForDifferentStages() throws Exception {
        // p3 depends on p2's second stage (s2) while p4 depends on its first
        // stage (s1); resolving p3 must accept the s2 completion as-is.
        /*
            /-->P1------\        p2(s1) --> p4
           git           P3
            \-->P2(S2)--/
         */
        GitMaterial git = u.wf(new GitMaterial("git"), "f");
        ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", u.m(git));
        ScheduleTestUtil.AddedPipeline p2_s1 = u.saveConfigWith("p2", "s1", u.m(git));
        ScheduleTestUtil.AddedPipeline p2_s2 = u.addStageToPipeline(p2_s1.config.name(), "s2");
        ScheduleTestUtil.MaterialDeclaration p2_material = u.m(new DependencyMaterial(p2_s1.config.name(), new CaseInsensitiveString("s2")));
        ScheduleTestUtil.AddedPipeline p3 = u.saveConfigWith("p3", p2_material, u.m(p1));
        ScheduleTestUtil.AddedPipeline p4 = u.saveConfigWith("p4", u.m(new DependencyMaterial(p2_s1.config.name(), new CaseInsensitiveString("s1"))));
        u.checkinInOrder(git, "g1");
        String p1_1 = u.runAndPass(p1, "g1");
        String p2_s2_1 = u.runAndPass(p2_s2, "g1");
        MaterialRevisions given = u.mrs(
                u.mr(p1, true, p1_1),
                u.mr(p2_s2, true, p2_s2_1));
        MaterialRevisions revisionsBasedOnDependencies = getRevisionsBasedOnDependencies(p3, goConfigDao.load(), given);
        assertThat(revisionsBasedOnDependencies, is(given));
    }
    @Test
    public void shouldPickTheRightRevisionsWhenMaterialIsRemovedAndPutBack() {
        // Verifies fan-in resolution for P4 when P2's material is swapped
        // from git2 to git3 and later restored back to git2.
        GitMaterial git1 = u.wf(new GitMaterial("git1-url"), "git-folder1");
        GitMaterial git2 = u.wf(new GitMaterial("git2-url"), "git-folder2");
        GitMaterial git3 = u.wf(new GitMaterial("git3-url"), "git-folder3");
        ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("P1", u.m(git1), u.m(git2));
        ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("P2", u.m(git2));
        ScheduleTestUtil.AddedPipeline p3 = u.saveConfigWith("P3", u.m(p1), u.m(p2), u.m(git1));
        ScheduleTestUtil.AddedPipeline p4 = u.saveConfigWith("P4", u.m(p3), u.m(git2));
        u.checkinInOrder(git1, "git1_1");
        u.checkinInOrder(git2, "git2_1");
        String p1_1 = u.runAndPass(p1, "git1_1", "git2_1");
        String p2_1 = u.runAndPass(p2, "git2_1");
        String p3_1 = u.runAndPass(p3, p1_1, p2_1, "git1_1");
        String p4_1 = u.runAndPass(p4, p3_1, "git2_1");
        u.checkinInOrder(git1, "git1_2");
        u.checkinInOrder(git2, "git2_2");
        String p1_2 = u.runAndPass(p1, "git1_2", "git2_2");
        String p2_2 = u.runAndPass(p2, "git2_2");
        String p3_2 = u.runAndPass(p3, p1_2, p2_2, "git1_2");
        String p4_2 = u.runAndPass(p4, p3_2, "git2_2");
        // Swap P2's material from git2 to git3 and reload the config.
        configHelper.setMaterialConfigForPipeline("P2", git3.config());
        CruiseConfig cruiseConfig = goConfigDao.load();
        p2 = new ScheduleTestUtil.AddedPipeline(cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("P2")), p2.material);
        u.checkinInOrder(git1, "git1_3");
        u.checkinInOrder(git2, "git2_3");
        u.checkinInOrder(git3, "git3_1");
        String p1_3 = u.runAndPass(p1, "git1_3", "git2_3");
        String p2_3 = u.runAndPass(p2, "git3_1");
        String p3_3 = u.runAndPass(p3, p1_3, p2_3, "git1_3");
        // check what happens to p4
        MaterialRevisions given = u.mrs(new MaterialRevision[]{
                u.mr(git2, true, "git2_3"),
                u.mr(p3, true, p3_3)});
        MaterialRevisions expected = u.mrs(new MaterialRevision[]{
                u.mr(git2, true, "git2_3"),
                u.mr(p3, true, p3_3)});
        MaterialRevisions finalRevisions = getRevisionsBasedOnDependencies(p4, cruiseConfig, given);
        assertThat(finalRevisions, is(expected));
        // bring back git2 in p2
        configHelper.setMaterialConfigForPipeline("P2", git2.config());
        cruiseConfig = goConfigDao.load();
        p2 = new ScheduleTestUtil.AddedPipeline(cruiseConfig.pipelineConfigByName(new CaseInsensitiveString("P2")), p2.material);
        // check what happened to p4
        given = u.mrs(
                u.mr(git2, true, "git2_3"),
                u.mr(p3, true, p3_3));
        expected = u.mrs(new MaterialRevision[]{
                u.mr(git2, true, "git2_3"),
                u.mr(p3, true, p3_3)});
        finalRevisions = getRevisionsBasedOnDependencies(p4, cruiseConfig, given);
        assertThat(finalRevisions, is(expected));
    }
    @Test
    public void shouldResolveDifferentCasesInAPipelineGraphSimilarToOneInGo01() {
        // NOTE(review): this test only builds the pipeline graph and makes no
        // assertions — it passes as long as saving the configuration does not
        // throw. Confirm whether resolution assertions were intended here.
        GitMaterial git1 = u.wf(new GitMaterial("git1-url"), "git-folder1");
        GitMaterial git2 = u.wf(new GitMaterial("git2-url"), "git-folder2");
        GitMaterial git3 = u.wf(new GitMaterial("git3-url"), "git-folder3");
        GitMaterial git4 = u.wf(new GitMaterial("git4-url"), "git-folder4");
        ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("P1", u.m(git1));
        ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("P2", u.m(p1), u.m(git1));
        ScheduleTestUtil.AddedPipeline p3 = u.saveConfigWith("P3", u.m(p1), u.m(git2));
        ScheduleTestUtil.AddedPipeline p4 = u.saveConfigWith("P4", u.m(p3), u.m(p2), u.m(git3));
        ScheduleTestUtil.AddedPipeline p5 = u.saveConfigWith("P5", u.m(p2), u.m(git3), u.m(git4));
        ScheduleTestUtil.AddedPipeline p6 = u.saveConfigWith("P6", u.m(p3), u.m(git3), u.m(git4));
        ScheduleTestUtil.AddedPipeline p7 = u.saveConfigWith("P7", u.m(p4), u.m(p5), u.m(p6));
        ScheduleTestUtil.AddedPipeline p8 = u.saveConfigWith("P8", u.m(p7));
    }
    @Test
    public void shouldThrowExceptionWhenStageHasPassedButIsNotPresentInModificationsTable() {
        // A stage that passed but has no corresponding dependency-modification
        // row must surface as NoModificationsPresentForDependentMaterialException
        // naming the offending stage.
        GitMaterial git = u.wf(new GitMaterial("git1-url"), "git-folder1");
        ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("P1", u.m(git));
        ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("P2", u.m(git));
        ScheduleTestUtil.AddedPipeline p3 = u.saveConfigWith("P3", u.m(p1), u.m(p2));
        u.checkinInOrder(git, "g1", "g2");
        String p1_1 = u.runAndPass(p1, "g1");
        String p2_1 = u.runAndPass(p2, "g1");
        String p3_1 = u.runAndPass(p3, p1_1, p2_1);
        String p1_2 = u.runAndPass(p1, "g2");
        // Schedule and pass p2 directly via dbHelper (not u.runAndPass) —
        // presumably this bypasses the modification bookkeeping; confirm
        // against DatabaseAccessHelper.pass.
        Pipeline p2_2instance = u.scheduleWith(p2, "g2");
        dbHelper.pass(p2_2instance);
        MaterialRevisions given = u.mrs(
                u.mr(p1, true, p1_2),
                u.mr(p2, false, p2_1)
        );
        MaterialRevisions previous = u.mrs(
                u.mr(p1, true, p1_1),
                u.mr(p2, true, p2_1)
        );
        try {
            getBuildCause(p3, given, previous);
            fail();
        } catch (NoModificationsPresentForDependentMaterialException exception) {
            assertThat(exception.getMessage(), Matchers.containsString(p2_2instance.getFirstStage().getIdentifier().getStageLocator()));
        }
    }
    @Test
    public void shouldComputeRevisionCorrectlyWhen_MoreThan1UpstreamPipelineHasMinimumRevision() throws Exception {
        // When several upstream pipelines tie for the minimum revision, p4's
        // resolution must still converge on a mutually compatible set (p1_2
        // rather than p1_3, to agree with p2_4/p3_4 which were built on g1).
        /*          +----------
             /-->P1---\        v
           git------> P3 -> P4
             \-->P2--/  ^
                        +----------
         */
        GitMaterial git = u.wf(new GitMaterial("git"), "f");
        ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", u.m(git));
        ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("p2", u.m(git));
        ScheduleTestUtil.AddedPipeline p3 = u.saveConfigWith("p3", u.m(p1), u.m(p2), u.m(git));
        ScheduleTestUtil.AddedPipeline p4 = u.saveConfigWith("p4", u.m(p1), u.m(p2), u.m(p3));
        int i = 0; // increasing offset fed to u.d(); presumably orders checkin timestamps — see ScheduleTestUtil.d
        u.checkinInOrder(git, u.d(i++), "g0");
        String p1_1 = u.runAndPass(p1, "g0");
        String p2_1 = u.runAndPass(p2, "g0");
        String p3_1 = u.runAndPass(p3, p1_1, p2_1, "g0");
        String p4_1 = u.runAndPass(p4, p1_1, p2_1, p3_1);
        u.checkinInOrder(git, u.d(i++), "g1");
        String p1_2 = u.runAndPass(p1, "g1");
        u.checkinInOrder(git, u.d(i++), "g2");
        String p2_2 = u.runAndPass(p2, "g2");
        String p3_2 = u.runAndPass(p3, p1_2, p2_2, "g2");
        u.checkinInOrder(git, u.d(i++), "g3");
        String p1_3 = u.runAndPass(p1, "g3");
        String p2_3 = u.runAndPass(p2, "g3");
        String p3_3 = u.runAndPass(p3, p1_3, p2_3, "g3");
        String p4_2 = u.runAndPass(p4, p1_3, p2_3, p3_3);
        String p2_4 = u.runAndPass(p2, "g1");
        String p3_4 = u.runAndPass(p3, p1_2, p2_4, "g1");
        MaterialRevisions given = u.mrs(
                u.mr(p1, true, p1_3),
                u.mr(p2, true, p2_4),
                u.mr(p3, true, p3_4));
        MaterialRevisions expected = u.mrs(
                u.mr(p1, true, p1_2),
                u.mr(p2, true, p2_4),
                u.mr(p3, true, p3_4));
        MaterialRevisions revisionsBasedOnDependencies = getRevisionsBasedOnDependencies(p4, goConfigDao.load(), given);
        assertThat(revisionsBasedOnDependencies, is(expected));
    }
@Test
public void shouldResolveWithNoPassedBuildOfRootNode() throws Exception {
/**
 * git -------+
 * | |
 * | |
 * v v
 * P1 -----> P2
 */
GitMaterial git = u.wf(new GitMaterial("git"), "folder1");
String[] git_revs = {"g1"};
int i = 0;
u.checkinInOrder(git, u.d(i++), git_revs);
ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("P1", u.m(git));
ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("P2", u.m(p1), u.m(git));
CruiseConfig cruiseConfig = goConfigDao.load();
String p1_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p1, u.d(i++), "g1");
// Fail P2 more times than the back-track limit allows, so the resolver has
// no passed P2 build to walk back to within the limit.
setMaxBackTrackLimit(2);
for (int j = 1; j <= maxBackTrackLimit() + 1; j++) {
u.runAndFail(p2, u.d(i++), p1_1, "g1");
}
MaterialRevisions given = u.mrs(u.mr(p1, true, p1_1), u.mr(git, true, "g1"));
// With no passed build of the root node, the given revisions are used as-is.
MaterialRevisions finalRevisions = getRevisionsBasedOnDependencies(p2, cruiseConfig, given);
assertThat(finalRevisions, is(given));
}
@Test
public void shouldResolveWithModifiedStageDefinitionOfRootNode() throws Exception {
/**
 * git -------+
 * | |
 * | |
 * v v
 * P1 -----> P2
 */
GitMaterial git = u.wf(new GitMaterial("git"), "folder1");
String[] git_revs = {"g1", "g2"};
int i = 0;
u.checkinInOrder(git, u.d(i++), git_revs);
ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("P1", u.m(git));
ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("P2", u.m(p1), u.m(git));
String p1_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p1, u.d(i++), "g1");
// Build more P2 history than the back-track limit covers, all of it under
// the original stage definition.
setMaxBackTrackLimit(2);
for (int j = 1; j <= maxBackTrackLimit() + 1; j++) {
u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p2, u.d(i++), p1_1, "g1");
}
String p1_2 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p1, u.d(i++), "g2");
// Rename P2's stage so none of its existing runs match the new definition.
p2 = u.changeStagenameForToPipeline("P2", "s", "new-stage");
CruiseConfig cruiseConfig = goConfigDao.load();
MaterialRevisions given = u.mrs(u.mr(p1, true, p1_2), u.mr(git, true, "g2"));
// Resolution should still succeed and keep the given revisions unchanged.
MaterialRevisions finalRevisions = getRevisionsBasedOnDependencies(p2, cruiseConfig, given);
assertThat(finalRevisions, is(given));
}
@Test
public void shouldConsiderFailedBuildOfRootNodeForFinalRevisionComputation() {
/*
+---> P1 ---+
| v
git-------> P2
*/
int i = 1;
GitMaterial git1 = u.wf(new GitMaterial("git1"), "folder");
String[] git_revs1 = {"g11", "g12", "g13"};
u.checkinInOrder(git1, u.d(i++), git_revs1);
ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", u.m(git1));
ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("p2", u.m(git1), u.m(p1));
CruiseConfig cruiseConfig = goConfigDao.load();
String p1_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p1, u.d(i++), "g11");
// NOTE(review): p2_1/p2_2 are never read; the runs exist only to build the
// pipeline history — including a FAILED p2 on g12 — that the resolver inspects.
String p2_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p2, u.d(i++), "g11", p1_1);
String p1_2 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p1, u.d(i++), "g12");
String p2_2 = u.runAndFail(p2, u.d(i++), "g12", p1_2);
String p1_3 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p1, u.d(i++), "g13");
MaterialRevisions given = u.mrs(
u.mr(p1, true, p1_3),
u.mr(git1, true, "g13"));
// The failed p2 run must not force a pin-back: the given revisions stand.
assertThat(getRevisionsBasedOnDependencies(p2, cruiseConfig, given), is(given));
}
@Test
public void shouldResolveWithModifiedMaterialDefinitionOfRoot() throws Exception {
/*
+---> P1 ---+
| v
git-------> P2 <---- hg
*/
int i = 1;
GitMaterial git1 = u.wf(new GitMaterial("git1"), "folder");
String[] git_revs1 = {"g11"};
u.checkinInOrder(git1, u.d(i++), git_revs1);
HgMaterial hg = u.wf(new HgMaterial("hg", null), "folder1");
String[] hg_revs1 = {"h11"};
u.checkinInOrder(hg, u.d(i++), hg_revs1);
ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", u.m(git1));
ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("p2", u.m(git1), u.m(p1));
String p1_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p1, u.d(i++), "g11");
// NOTE(review): p2_1 is unread; the run just seeds p2's history before its
// material definition changes below.
String p2_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p2, u.d(i++), "g11", p1_1);
// Add a brand-new material (hg) to p2 after it has already run once.
p2 = u.addMaterialToPipeline(p2, u.m(hg));
MaterialRevisions given = u.mrs(
u.mr(git1, true, "g11"),
u.mr(p1, true, p1_1),
u.mr(hg, true, "h11"));
// Resolution copes with the changed definition and keeps the given
// revisions, including the never-before-built hg material.
assertThat(getRevisionsBasedOnDependencies(p2, goConfigDao.load(), given), is(given));
}
@Test
public void shouldResolveTriangleDependencyWithPackageMaterial() {
/*
+---> P1 ---+
| v
pkg1-------> P2
*/
int i = 1;
PackageMaterial pkg1 = MaterialsMother.packageMaterial();
u.addPackageDefinition((PackageMaterialConfig) pkg1.config());
String[] pkg_revs1 = {"pkg1-1", "pkg1-2"};
u.checkinInOrder(pkg1, u.d(i++), pkg_revs1);
ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", u.m(pkg1));
ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("p2", u.m(pkg1), u.m(p1));
String p1_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p1, u.d(i++), "pkg1-1");
String p2_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p2, u.d(i++), "pkg1-1", p1_1);
MaterialRevisions given = u.mrs(
u.mr(pkg1, true, "pkg1-2"),
u.mr(p1, true, p1_1));
// pkg1 must be pinned back to pkg1-1: the revision the given p1_1 run was
// actually built from.
MaterialRevisions expected = u.mrs(
u.mr(pkg1, true, "pkg1-1"),
u.mr(p1, true, p1_1));
assertThat(getRevisionsBasedOnDependencies(p2, goConfigDao.load(), given), is(expected));
}
@Test
public void shouldResolveDiamondDependencyWithPackageMaterial() {
/*
+---> P1 ---+
| v
pkg1 P3
| ^
+--> P2 ----+
*/
int i = 1;
PackageMaterial pkg1 = MaterialsMother.packageMaterial();
u.addPackageDefinition((PackageMaterialConfig) pkg1.config());
String[] pkg_revs = {"pkg1-1", "pkg1-2"};
u.checkinInOrder(pkg1, u.d(i++), pkg_revs);
ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", u.m(pkg1));
ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("p2", u.m(pkg1));
ScheduleTestUtil.AddedPipeline p3 = u.saveConfigWith("p3", u.m(p1), u.m(p2));
String p1_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p1, u.d(i++), "pkg1-1");
String p2_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p2, u.d(i++), "pkg1-1");
String p2_2 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p2, u.d(i++), "pkg1-2");
MaterialRevisions given = u.mrs(
u.mr(p1, true, p1_1),
u.mr(p2, true, p2_2));
// p2 must fall back to p2_1 so both legs of the diamond agree on pkg1-1.
MaterialRevisions expected = u.mrs(
u.mr(p1, true, p1_1),
u.mr(p2, true, p2_1));
assertThat(getRevisionsBasedOnDependencies(p3, goConfigDao.load(), given), is(expected));
}
@Test
public void shouldResolveDiamondDependencyWithPluggableSCMMaterial() {
/*
+---> P1 ---+
| v
scm1 P3
| ^
+--> P2 ----+
*/
int i = 1;
PluggableSCMMaterial pluggableSCMMaterial = MaterialsMother.pluggableSCMMaterial();
u.addSCMConfig(pluggableSCMMaterial.getScmConfig());
String[] pkg_revs = {"scm1-1", "scm1-2"};
u.checkinInOrder(pluggableSCMMaterial, u.d(i++), pkg_revs);
ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", u.m(pluggableSCMMaterial));
ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("p2", u.m(pluggableSCMMaterial));
ScheduleTestUtil.AddedPipeline p3 = u.saveConfigWith("p3", u.m(p1), u.m(p2));
String p1_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p1, u.d(i++), "scm1-1");
String p2_1 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p2, u.d(i++), "scm1-1");
String p2_2 = u.runAndPassWithGivenMDUTimestampAndRevisionStrings(p2, u.d(i++), "scm1-2");
MaterialRevisions given = u.mrs(
u.mr(p1, true, p1_1),
u.mr(p2, true, p2_2));
// p2 must fall back to p2_1 so both legs of the diamond agree on scm1-1.
MaterialRevisions expected = u.mrs(
u.mr(p1, true, p1_1),
u.mr(p2, true, p2_1));
assertThat(getRevisionsBasedOnDependencies(p3, goConfigDao.load(), given), is(expected));
}
@Test
public void shouldResolveDiamondDependencyWithChildrenDependingOnDifferentStageDependency() throws Exception {
/*
+---> P3 ---+
| v
pkg -> P2 P5
| ^
+--> P4 ----+
*/
GitMaterial git = u.wf(new GitMaterial("git"), "f");
u.checkinInOrder(git, "g1");
// p2 has two stages; p3 depends on stage s1, p4 on stage s2 of the same pipeline.
ScheduleTestUtil.AddedPipeline p2_s1 = u.saveConfigWith("p2", "s1", u.m(git));
ScheduleTestUtil.AddedPipeline p2_s2 = u.addStageToPipeline(p2_s1.config.name(), "s2");
ScheduleTestUtil.AddedPipeline p3 = u.saveConfigWith("p3", u.m(new DependencyMaterial(p2_s1.config.name(), new CaseInsensitiveString("s1"))));
ScheduleTestUtil.AddedPipeline p4 = u.saveConfigWith("p4", u.m(new DependencyMaterial(p2_s1.config.name(), new CaseInsensitiveString("s2"))));
ScheduleTestUtil.AddedPipeline p5 = u.saveConfigWith("p5", u.m(p3), u.m(p4));
String p2_s1_1 = u.runAndPass(p2_s1, "g1");
String p2_s2_1 = u.runAndPass(p2_s2, "g1");
String p3_1 = u.runAndPass(p3, p2_s1_1);
String p4_1 = u.runAndPass(p4, p2_s2_1);
MaterialRevisions given = u.mrs(
u.mr(p3, true, p3_1),
u.mr(p4, true, p4_1));
// Stage-level dependencies on different stages of the same pipeline are
// compatible, so the given revisions resolve unchanged.
MaterialRevisions revisionsBasedOnDependencies = getRevisionsBasedOnDependencies(p5, goConfigDao.load(), given);
assertThat(revisionsBasedOnDependencies, is(given));
}
/**
 * Resolves the build cause for {@code staging} via AutoBuild, refreshing the
 * pipeline timeline first so the resolver sees all runs made by the test.
 */
private BuildCause getBuildCause(ScheduleTestUtil.AddedPipeline staging, MaterialRevisions given, MaterialRevisions previous) {
    final String pipelineName = staging.config.name().toString();
    final AutoBuild build = new AutoBuild(goConfigService, pipelineService, pipelineName, systemEnvironment, materialChecker);
    pipelineTimeline.update();
    return build.onModifications(given, false, previous);
}
/**
 * Asks the pipeline service to resolve {@code given} against the dependency
 * graph of {@code pipeline}, after bringing the pipeline timeline up to date.
 */
private MaterialRevisions getRevisionsBasedOnDependencies(ScheduleTestUtil.AddedPipeline pipeline, CruiseConfig cruiseConfig, MaterialRevisions given) {
    pipelineTimeline.update();
    final CaseInsensitiveString pipelineName = pipeline.config.name();
    return pipelineService.getRevisionsBasedOnDependencies(given, cruiseConfig, pipelineName);
}
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package dai;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
/**
*
* @author Emmanuel
*/
public class Dai {

    /** Length of the secret "DAI" number. */
    public static final int GUESS_LENGTH = 4;
    /** Marker for a digit position whose value has not been deduced yet. */
    public static final char UNKNOWN = '-';
    /** Every digit a position may take. */
    public static final String DEF_OPTIONS = "0123456789";
    /** Regular expression matching a non-empty run of decimal digits. */
    public static final String numRegExp = "\\d+";
    // The secret number. It stays empty in this build: the human keeps the
    // secret and reports dead/injured counts through the dialogs in main(),
    // so the (number != null) guard below is always true.
    private static String number = "";

    /** Prints {@code obj} without a trailing newline. */
    public static void print(Object obj) {
        System.out.print(obj);
    }

    /** Prints {@code obj} followed by a newline. */
    public static void println(Object obj) {
        System.out.println(obj);
    }

    /** Prints an empty line. */
    public static void println() {
        System.out.println("");
    }

    /**
     * Interactive entry point. The solver repeatedly proposes a 4-digit guess
     * and the user answers with the number of "dead" digits (right digit,
     * right position) and "injured" digits (right digit, wrong position),
     * until the number is solved, the answers become contradictory, the
     * solver exhausts its 10 guesses, or the user cancels a dialog.
     *
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) {
        Guess guess = new Guess(number, 0, 0);
        try {
            UIManager.setLookAndFeel("javax.swing.plaf.nimbus.NimbusLookAndFeel");
        } catch (ClassNotFoundException | InstantiationException
                | IllegalAccessException | UnsupportedLookAndFeelException ex) {
            // Nimbus is purely cosmetic; log and continue with the default L&F.
            Logger.getLogger(Dai.class.getName()).log(Level.SEVERE, null, ex);
        }
        JOptionPane.showMessageDialog(null, "Please pick a valid 'DAI' (4 digit) number");
        DaiSolver solver = new SenSolver();
        Integer possible = SenSolver.instance((SenSolver) solver).getPossible().size();
        if (number != null) {
            Dai.println(number + "\n\nSolver starts");
            do {
                solver.guess();
                guess = solver.getGuess();
                // Re-prompt until the "dead" answer is a single digit in [0, 4];
                // a null reply means the user cancelled the dialog.
                String dead = "";
                while (dead.isEmpty() || !Dai.isNumber(dead) || dead.length() > 1 || Integer.parseInt(dead) > 4 || Integer.parseInt(dead) < 0) {
                    dead = JOptionPane.showInputDialog(null, "Number of Dead (Enter valid answer)", "Guess" + solver.getGuesses().size() + ": " + guess.getNumber(), 3);
                    if (dead == null) {
                        break;
                    }
                }
                if (dead == null) {
                    break;
                }
                guess.setDead(dead);
                if (guess.getDead() < 4) {
                    // Not solved yet: collect the "injured" count the same way.
                    String injured = "";
                    while (injured.isEmpty() || !Dai.isNumber(injured) || injured.length() > 1 || Integer.parseInt(injured) > 4 || Integer.parseInt(injured) < 0) {
                        injured = JOptionPane.showInputDialog(null, "Number of Injured (Enter valid answer)", "Guess" + solver.getGuesses().size() + ": " + guess.getNumber(), 3);
                        if (injured == null) {
                            break;
                        }
                    }
                    if (injured == null) {
                        break;
                    }
                    guess.setInjured(injured);
                }
                println("Guess" + (solver.guesses.size()) + "\t: " + solver.getGuess().toString());
                solver.infere();
                possible = SenSolver.instance((SenSolver) solver).getPossible().size();
                if (guess.getDAI() > 4 || possible < 0) {
                    // Contradictory feedback: no candidate can satisfy all answers.
                    possible = 0;
                    break;
                }
            } while (guess.getDead() < 4 && solver.getGuesses().size() < 10 && possible > 0);
        }
        String msg = "";
        if (solver.getGuess() != null && !(solver.getGuess().getDead() < 4)) {
            msg = "YAY!!! I WIN.";
        } else if (!(possible > 0)) {
            msg = "ERROR! WRONG INFORMATION SUPPLIED.";
        } else if (!(solver.getGuesses().size() < 10)) {
            msg = "I GIVE UP! YOU WIN.";
        } else {
            msg = "WHAT HAPPENED, DID YOU GIVE UP?";
        }
        println(msg);
        JOptionPane.showMessageDialog(null, msg);
    }

    /**
     * Counts "dead" digits: positions where the secret and the guess agree.
     * Currently unused — feedback comes from the user, not from this check.
     */
    private static Integer getDead(Guess guess) {
        Integer dead = 0;
        for (int i = 0; i < number.length(); i++) {
            if (number.charAt(i) == guess.getNumber().charAt(i)) {
                dead++;
            }
        }
        return dead;
    }

    /**
     * Counts secret digits that appear in the guess at a different position.
     * NOTE(review): every cross-position pair is counted, so repeated digits
     * can inflate this beyond the conventional "injured" count — confirm
     * before using. Currently unused.
     */
    private static Integer getInjured(Guess guess) {
        Integer injured = 0;
        for (int i = 0; i < number.length(); i++) {
            for (int j = 0; j < guess.getNumber().length(); j++) {
                if (number.charAt(i) == guess.getNumber().charAt(j) && i != j) {
                    injured++;
                }
            }
        }
        return injured;
    }

    /** Returns whether {@code number} is a non-empty string of decimal digits. */
    public static boolean isNumber(String number) {
        Boolean is = number.matches(numRegExp);
        return is;
    }

    /** Returns {@code container} with every character of {@code number} removed. */
    public static String not(String number, String container) {
        String value = container;
        for (int i = 0; i < number.length(); i++) {
            value = value.replaceAll("" + number.charAt(i), "");
        }
        return value;
    }

    /**
     * Returns the larger of the two values.
     * Fixed: the previous implementation ignored its arguments and always
     * returned 0.
     */
    public static Integer max(Integer num1, Integer num2) {
        return Math.max(num1, num2);
    }

    /** Counts occurrences of {@code c} in {@code s}; {@link #UNKNOWN} never matches. */
    public static int count(char c, String s) {
        int _count = 0;
        for (int i = 0; i < s.length(); i++) {
            if (c == s.charAt(i) && c != UNKNOWN) {
                _count++;
            }
        }
        return _count;
    }

    /** Returns true if {@code num} contains any repeated character. */
    public static Boolean conflict(String num) {
        Boolean thereIs = false;
        for (int i = 0; i < num.length(); i++) {
            thereIs = (count(num.charAt(i), num) > 1);
            if (thereIs) {
                break;
            }
        }
        return thereIs;
    }

    /** Returns true if any character of {@code num2} also occurs in {@code num1}. */
    public static Boolean conflict(String num1, String num2) {
        Boolean thereIs = false;
        for (int i = 0; i < num2.length(); i++) {
            thereIs = (count(num2.charAt(i), num1) > 0);
            if (thereIs) {
                break;
            }
        }
        return thereIs;
    }

    /** Returns the characters of {@code num2} that also occur in {@code num1}. */
    public static String conflicts(String num1, String num2) {
        String locus = "";
        for (int i = 0; i < num2.length(); i++) {
            if (count(num2.charAt(i), num1) > 0) {
                locus += num2.charAt(i);
            }
        }
        return locus;
    }

    /** Returns the indices (concatenated as digits) of num2's characters found in num1. */
    public static String conflictZones(String num1, String num2) {
        String zones = "";
        for (int i = 0; i < num2.length(); i++) {
            if (count(num2.charAt(i), num1) > 0) {
                zones += i;
            }
        }
        return zones;
    }

    /**
     * Returns the characters the two strings share at identical positions.
     * Fixed: the comparison now spans the common prefix (Math.min); the old
     * Math.max-based truncation threw StringIndexOutOfBoundsException for
     * inputs of different lengths. Equal-length behavior is unchanged.
     */
    public static String perfectMatches(String num1, String num2) {
        String locus = "";
        Integer length = Math.min(num1.length(), num2.length());
        num1 = num1.substring(0, length);
        num2 = num2.substring(0, length);
        for (int i = 0; i < num1.length(); i++) {
            if (count(num2.charAt(i), num1) > 0 && num1.substring(i, i + 1).equals(num2.substring(i, i + 1))) {
                locus += num2.charAt(i);
            }
        }
        return locus;
    }

    /**
     * Returns characters of {@code num2} that occur in {@code num1} but not at
     * the same position. Fixed: Math.min replaces Math.max for the same
     * out-of-bounds reason as {@link #perfectMatches}.
     */
    public static String imperfectMatches(String num1, String num2) {
        String locus = "";
        Integer length = Math.min(num1.length(), num2.length());
        num1 = num1.substring(0, length);
        num2 = num2.substring(0, length);
        for (int i = 0; i < num1.length(); i++) {
            if (count(num2.charAt(i), num1) > 0 && !num1.substring(i, i + 1).equals(num2.substring(i, i + 1))) {
                locus += num2.charAt(i);
            }
        }
        return locus;
    }

    /** Truncates on the right, or zero-pads on the left, to exactly {@code length} chars. */
    public static String normalise(String abnorm, Integer length) {
        String norm = "";
        if (abnorm.length() > length) {
            norm = abnorm.substring(0, length);
        } else {
            norm = abnorm;
            while (norm.length() < length) {
                norm = "0" + norm;
            }
        }
        return norm;
    }

    /** Returns {@code number} with its characters in reverse order. */
    public static String reverse(String number) {
        String reversi = "";
        while (reversi.length() < number.length()) {
            Integer startIdx = number.length() - reversi.length() - 1;
            reversi += number.substring(startIdx, startIdx + 1);
        }
        return reversi;
    }

    /** Returns number! (int arithmetic — overflows for inputs above 12). */
    public static Integer factorial(Integer number) {
        Integer fn = 1;
        while (number > 0) {
            fn *= number--;
        }
        return fn;
    }

    /** Returns nPr = n! / (n - r)!. */
    public static Integer permutation(Integer number, Integer radius) {
        Integer nPr = factorial(number) / factorial(number - radius);
        return nPr;
    }

    /** Returns nCr = n! / (r! (n - r)!). */
    public static Integer combination(Integer number, Integer radius) {
        Integer nCr = factorial(number) / (factorial(radius) * factorial(number - radius));
        return nCr;
    }
}
| |
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.aop.framework.autoproxy;
import java.io.IOException;
import org.junit.Test;
import test.mixin.Lockable;
import org.springframework.aop.Advisor;
import org.springframework.aop.framework.Advised;
import org.springframework.aop.framework.autoproxy.target.AbstractBeanFactoryBasedTargetSourceCreator;
import org.springframework.aop.support.AopUtils;
import org.springframework.aop.target.AbstractBeanFactoryBasedTargetSource;
import org.springframework.aop.target.CommonsPool2TargetSource;
import org.springframework.aop.target.LazyInitTargetSource;
import org.springframework.aop.target.PrototypeTargetSource;
import org.springframework.aop.target.ThreadLocalTargetSource;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.tests.aop.advice.CountingBeforeAdvice;
import org.springframework.tests.aop.interceptor.NopInterceptor;
import org.springframework.tests.sample.beans.CountingTestBean;
import org.springframework.tests.sample.beans.ITestBean;
import static org.junit.Assert.*;
/**
* Tests for auto proxy creation by advisor recognition.
*
* @see org.springframework.aop.framework.autoproxy.AdvisorAutoProxyCreatorIntegrationTests
*
* @author Rod Johnson
* @author Dave Syer
* @author Chris Beams
*/
@SuppressWarnings("resource")
public class AdvisorAutoProxyCreatorTests {
// All context file names derive from the test class name, e.g.
// "AdvisorAutoProxyCreatorTests-context.xml", resolved relative to CLASS.
private static final Class<?> CLASS = AdvisorAutoProxyCreatorTests.class;
private static final String CLASSNAME = CLASS.getSimpleName();
private static final String DEFAULT_CONTEXT = CLASSNAME + "-context.xml";
private static final String COMMON_INTERCEPTORS_CONTEXT = CLASSNAME + "-common-interceptors.xml";
private static final String CUSTOM_TARGETSOURCE_CONTEXT = CLASSNAME + "-custom-targetsource.xml";
private static final String QUICK_TARGETSOURCE_CONTEXT = CLASSNAME + "-quick-targetsource.xml";
private static final String OPTIMIZED_CONTEXT = CLASSNAME + "-optimized.xml";
/**
 * Return a bean factory with attributes and EnterpriseServices configured.
 */
protected BeanFactory getBeanFactory() throws IOException {
return new ClassPathXmlApplicationContext(DEFAULT_CONTEXT, CLASS);
}
/**
 * Check that we can provide a common interceptor that will
 * appear in the chain before "specific" interceptors,
 * which are sourced from matching advisors
 */
@Test
public void testCommonInterceptorAndAdvisor() throws Exception {
BeanFactory bf = new ClassPathXmlApplicationContext(COMMON_INTERCEPTORS_CONTEXT, CLASS);
ITestBean test1 = (ITestBean) bf.getBean("test1");
assertTrue(AopUtils.isAopProxy(test1));
Lockable lockable1 = (Lockable) test1;
NopInterceptor nop1 = (NopInterceptor) bf.getBean("nopInterceptor");
NopInterceptor nop2 = (NopInterceptor) bf.getBean("pointcutAdvisor", Advisor.class).getAdvice();
ITestBean test2 = (ITestBean) bf.getBean("test2");
Lockable lockable2 = (Lockable) test2;
// Locking should be independent; nop is shared
assertFalse(lockable1.locked());
assertFalse(lockable2.locked());
// equals 2 calls on shared nop, because it's first and sees calls
// against the Lockable interface introduced by the specific advisor
assertEquals(2, nop1.getCount());
assertEquals(0, nop2.getCount());
lockable1.lock();
assertTrue(lockable1.locked());
assertFalse(lockable2.locked());
assertEquals(5, nop1.getCount());
assertEquals(0, nop2.getCount());
// Merely fetching the bean must not advance either interceptor's count.
PackageVisibleMethod packageVisibleMethod = (PackageVisibleMethod) bf.getBean("packageVisibleMethod");
assertEquals(5, nop1.getCount());
assertEquals(0, nop2.getCount());
packageVisibleMethod.doSomething();
assertEquals(6, nop1.getCount());
assertEquals(1, nop2.getCount());
// The introduction also applies to the package-visible-method bean.
assertTrue(packageVisibleMethod instanceof Lockable);
Lockable lockable3 = (Lockable) packageVisibleMethod;
lockable3.lock();
assertTrue(lockable3.locked());
lockable3.unlock();
assertFalse(lockable3.locked());
}
/**
 * We have custom TargetSourceCreators but there's no match, and
 * hence no proxying, for this bean
 */
@Test
public void testCustomTargetSourceNoMatch() throws Exception {
BeanFactory bf = new ClassPathXmlApplicationContext(CUSTOM_TARGETSOURCE_CONTEXT, CLASS);
ITestBean test = (ITestBean) bf.getBean("test");
assertFalse(AopUtils.isAopProxy(test));
assertEquals("Rod", test.getName());
assertEquals("Kerry", test.getSpouse().getName());
}
/** The "prototypeTest" bean gets proxied with a PrototypeTargetSource. */
@Test
public void testCustomPrototypeTargetSource() throws Exception {
CountingTestBean.count = 0;
BeanFactory bf = new ClassPathXmlApplicationContext(CUSTOM_TARGETSOURCE_CONTEXT, CLASS);
ITestBean test = (ITestBean) bf.getBean("prototypeTest");
assertTrue(AopUtils.isAopProxy(test));
Advised advised = (Advised) test;
assertTrue(advised.getTargetSource() instanceof PrototypeTargetSource);
assertEquals("Rod", test.getName());
// Check that references survived prototype creation
assertEquals("Kerry", test.getSpouse().getName());
assertEquals("Only 2 CountingTestBeans instantiated", 2, CountingTestBean.count);
CountingTestBean.count = 0;
}
/** Lazy-init target sources defer target creation until the first method call. */
@Test
public void testLazyInitTargetSource() throws Exception {
CountingTestBean.count = 0;
BeanFactory bf = new ClassPathXmlApplicationContext(CUSTOM_TARGETSOURCE_CONTEXT, CLASS);
ITestBean test = (ITestBean) bf.getBean("lazyInitTest");
assertTrue(AopUtils.isAopProxy(test));
Advised advised = (Advised) test;
assertTrue(advised.getTargetSource() instanceof LazyInitTargetSource);
assertEquals("No CountingTestBean instantiated yet", 0, CountingTestBean.count);
assertEquals("Rod", test.getName());
assertEquals("Kerry", test.getSpouse().getName());
assertEquals("Only 1 CountingTestBean instantiated", 1, CountingTestBean.count);
CountingTestBean.count = 0;
}
/**
 * The quick target source creator picks the TargetSource from the bean-name
 * prefix: no prefix -> unproxied, ":" -> pooled, "%" -> thread-local,
 * "!" -> prototype (each case asserted below).
 */
@Test
public void testQuickTargetSourceCreator() throws Exception {
ClassPathXmlApplicationContext bf =
new ClassPathXmlApplicationContext(QUICK_TARGETSOURCE_CONTEXT, CLASS);
ITestBean test = (ITestBean) bf.getBean("test");
assertFalse(AopUtils.isAopProxy(test));
assertEquals("Rod", test.getName());
// Check that references survived pooling
assertEquals("Kerry", test.getSpouse().getName());
// Now test the pooled one
test = (ITestBean) bf.getBean(":test");
assertTrue(AopUtils.isAopProxy(test));
Advised advised = (Advised) test;
assertTrue(advised.getTargetSource() instanceof CommonsPool2TargetSource);
assertEquals("Rod", test.getName());
// Check that references survived pooling
assertEquals("Kerry", test.getSpouse().getName());
// Now test the ThreadLocal one
test = (ITestBean) bf.getBean("%test");
assertTrue(AopUtils.isAopProxy(test));
advised = (Advised) test;
assertTrue(advised.getTargetSource() instanceof ThreadLocalTargetSource);
assertEquals("Rod", test.getName());
// Check that references survived pooling
assertEquals("Kerry", test.getSpouse().getName());
// Now test the Prototype TargetSource
test = (ITestBean) bf.getBean("!test");
assertTrue(AopUtils.isAopProxy(test));
advised = (Advised) test;
assertTrue(advised.getTargetSource() instanceof PrototypeTargetSource);
assertEquals("Rod", test.getName());
// Check that references survived pooling
assertEquals("Kerry", test.getSpouse().getName());
ITestBean test2 = (ITestBean) bf.getBean("!test");
assertFalse("Prototypes cannot be the same object", test == test2);
assertEquals("Rod", test2.getName());
assertEquals("Kerry", test2.getSpouse().getName());
bf.close();
}
/** Optimized proxies must still invoke before-advice on every proxied call. */
@Test
public void testWithOptimizedProxy() throws Exception {
BeanFactory beanFactory = new ClassPathXmlApplicationContext(OPTIMIZED_CONTEXT, CLASS);
ITestBean testBean = (ITestBean) beanFactory.getBean("optimizedTestBean");
assertTrue(AopUtils.isAopProxy(testBean));
CountingBeforeAdvice beforeAdvice = (CountingBeforeAdvice) beanFactory.getBean("countingAdvice");
testBean.setAge(23);
testBean.getAge();
assertEquals("Incorrect number of calls to proxy", 2, beforeAdvice.getCalls());
}
}
/**
 * TargetSourceCreator that proxies only beans whose name starts with
 * "prototype", giving them a fresh target per invocation; every other bean
 * is left alone (null means "no custom target source").
 */
class SelectivePrototypeTargetSourceCreator extends AbstractBeanFactoryBasedTargetSourceCreator {
    @Override
    protected AbstractBeanFactoryBasedTargetSource createBeanFactoryBasedTargetSource(
            Class<?> beanClass, String beanName) {
        return beanName.startsWith("prototype") ? new PrototypeTargetSource() : null;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.samza.adapter;
import static org.apache.beam.runners.samza.adapter.TestSourceHelpers.createElementMessage;
import static org.apache.beam.runners.samza.adapter.TestSourceHelpers.createWatermarkMessage;
import static org.apache.beam.runners.samza.adapter.TestSourceHelpers.expectWrappedException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import org.apache.beam.runners.samza.SamzaPipelineOptions;
import org.apache.beam.runners.samza.adapter.TestUnboundedSource.SplittableBuilder;
import org.apache.beam.runners.samza.metrics.SamzaMetricsContainer;
import org.apache.beam.runners.samza.runtime.OpMessage;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.samza.Partition;
import org.apache.samza.metrics.MetricsRegistryMap;
import org.apache.samza.system.IncomingMessageEnvelope;
import org.apache.samza.system.MessageType;
import org.apache.samza.system.SystemConsumer;
import org.apache.samza.system.SystemStreamPartition;
import org.joda.time.Instant;
import org.junit.Test;
/** Tests for {@link UnboundedSourceSystem}. */
public class UnboundedSourceSystemTest {
// A reasonable time to wait to get all messages from the source assuming no blocking.
private static final long DEFAULT_TIMEOUT_MILLIS = 1000;
// Time allowed for a watermark message to arrive before a consume loop gives up.
private static final long DEFAULT_WATERMARK_TIMEOUT_MILLIS = 1000;
// The single partition of the test system/stream every test registers against.
private static final SystemStreamPartition DEFAULT_SSP =
new SystemStreamPartition("default-system", "default-system", new Partition(0));
// Coder for (de)serializing TestCheckpointMark offsets of the test source.
private static final Coder<TestCheckpointMark> CHECKPOINT_MARK_CODER =
TestUnboundedSource.createBuilder().build().getCheckpointMarkCoder();
/**
 * Starting and stopping a consumer over a source with no elements must
 * deliver no messages within the timeout.
 */
@Test
public void testConsumerStartStop() throws IOException, InterruptedException {
final TestUnboundedSource<String> source = TestUnboundedSource.<String>createBuilder().build();
final UnboundedSourceSystem.Consumer<String, TestCheckpointMark> consumer =
createConsumer(source);
consumer.register(DEFAULT_SSP, offset(0));
consumer.start();
// Collections.emptyList() replaces the raw-typed Collections.EMPTY_LIST
// constant, avoiding an unchecked raw-type usage; equality is unchanged.
assertEquals(
Collections.emptyList(),
consumeUntilTimeoutOrWatermark(consumer, DEFAULT_SSP, DEFAULT_TIMEOUT_MILLIS));
consumer.stop();
}
/** A single element is delivered at offset 0 with the minimum (initial) timestamp. */
@Test
public void testConsumeOneMessage() throws IOException, InterruptedException {
final TestUnboundedSource<String> source =
TestUnboundedSource.<String>createBuilder().addElements("test").build();
final UnboundedSourceSystem.Consumer<String, TestCheckpointMark> consumer =
createConsumer(source);
consumer.register(DEFAULT_SSP, null);
consumer.start();
assertEquals(
Arrays.asList(
createElementMessage(
DEFAULT_SSP, offset(0), "test", BoundedWindow.TIMESTAMP_MIN_VALUE)),
consumeUntilTimeoutOrWatermark(consumer, DEFAULT_SSP, DEFAULT_TIMEOUT_MILLIS));
consumer.stop();
}
/**
 * setTimestamp applies to elements added after it: "before" keeps the minimum
 * timestamp while "after" carries the explicitly set one.
 */
@Test
public void testAdvanceTimestamp() throws IOException, InterruptedException {
final Instant timestamp = Instant.now();
final TestUnboundedSource<String> source =
TestUnboundedSource.<String>createBuilder()
.addElements("before")
.setTimestamp(timestamp)
.addElements("after")
.build();
final UnboundedSourceSystem.Consumer<String, TestCheckpointMark> consumer =
createConsumer(source);
consumer.register(DEFAULT_SSP, null);
consumer.start();
assertEquals(
Arrays.asList(
createElementMessage(
DEFAULT_SSP, offset(0), "before", BoundedWindow.TIMESTAMP_MIN_VALUE),
createElementMessage(DEFAULT_SSP, offset(1), "after", timestamp)),
consumeUntilTimeoutOrWatermark(consumer, DEFAULT_SSP, DEFAULT_TIMEOUT_MILLIS));
consumer.stop();
}
/**
 * Elements added under a single setTimestamp all share that timestamp and are
 * delivered in order with increasing offsets.
 */
@Test
public void testConsumeMultipleMessages() throws IOException, InterruptedException {
final Instant timestamp = Instant.now();
final TestUnboundedSource<String> source =
TestUnboundedSource.<String>createBuilder()
.setTimestamp(timestamp)
.addElements("test", "a", "few", "messages")
.build();
final UnboundedSourceSystem.Consumer<String, TestCheckpointMark> consumer =
createConsumer(source);
consumer.register(DEFAULT_SSP, null);
consumer.start();
assertEquals(
Arrays.asList(
createElementMessage(DEFAULT_SSP, offset(0), "test", timestamp),
createElementMessage(DEFAULT_SSP, offset(1), "a", timestamp),
createElementMessage(DEFAULT_SSP, offset(2), "few", timestamp),
createElementMessage(DEFAULT_SSP, offset(3), "messages", timestamp)),
consumeUntilTimeoutOrWatermark(consumer, DEFAULT_SSP, DEFAULT_TIMEOUT_MILLIS));
consumer.stop();
}
/**
 * advanceWatermarkTo produces a watermark message after the preceding
 * elements; the consume loop runs until that watermark (or the timeout).
 */
@Test
public void testAdvanceWatermark() throws IOException, InterruptedException {
final Instant now = Instant.now();
final Instant nowPlusOne = now.plus(1L);
final TestUnboundedSource<String> source =
TestUnboundedSource.<String>createBuilder()
.setTimestamp(now)
.addElements("first")
.setTimestamp(nowPlusOne)
.addElements("second")
.advanceWatermarkTo(now)
.build();
final UnboundedSourceSystem.Consumer<String, TestCheckpointMark> consumer =
createConsumer(source);
consumer.register(DEFAULT_SSP, null);
consumer.start();
assertEquals(
Arrays.asList(
createElementMessage(DEFAULT_SSP, offset(0), "first", now),
createElementMessage(DEFAULT_SSP, offset(1), "second", nowPlusOne),
createWatermarkMessage(DEFAULT_SSP, now)),
consumeUntilTimeoutOrWatermark(consumer, DEFAULT_SSP, DEFAULT_WATERMARK_TIMEOUT_MILLIS));
consumer.stop();
}
@Test
public void testMultipleAdvanceWatermark() throws IOException, InterruptedException {
  final Instant now = Instant.now();
  final Instant nowPlusOne = now.plus(1L);
  final Instant nowPlusTwo = now.plus(2L);
  final TestUnboundedSource<String> source =
      TestUnboundedSource.<String>createBuilder()
          .setTimestamp(now)
          .addElements("first")
          .advanceWatermarkTo(now)
          .noElements() // will output the first watermark
          .setTimestamp(nowPlusOne)
          .addElements("second")
          .setTimestamp(nowPlusTwo)
          .addElements("third")
          .advanceWatermarkTo(nowPlusOne)
          .build();
  final UnboundedSourceSystem.Consumer<String, TestCheckpointMark> consumer =
      createConsumer(source);
  consumer.register(DEFAULT_SSP, null);
  consumer.start();
  // First batch: one element, then the first watermark.
  final List<IncomingMessageEnvelope> firstBatch =
      Arrays.asList(
          createElementMessage(DEFAULT_SSP, offset(0), "first", now),
          createWatermarkMessage(DEFAULT_SSP, now));
  assertEquals(
      firstBatch,
      consumeUntilTimeoutOrWatermark(consumer, DEFAULT_SSP, DEFAULT_WATERMARK_TIMEOUT_MILLIS));
  // Second batch: the remaining elements, then the second watermark.
  final List<IncomingMessageEnvelope> secondBatch =
      Arrays.asList(
          createElementMessage(DEFAULT_SSP, offset(1), "second", nowPlusOne),
          createElementMessage(DEFAULT_SSP, offset(2), "third", nowPlusTwo),
          createWatermarkMessage(DEFAULT_SSP, nowPlusOne));
  assertEquals(
      secondBatch,
      consumeUntilTimeoutOrWatermark(consumer, DEFAULT_SSP, DEFAULT_WATERMARK_TIMEOUT_MILLIS));
  consumer.stop();
}
@Test
public void testReaderThrowsAtStart() throws Exception {
  // The very first reader operation fails; the consumer must surface the cause.
  final IOException failure = new IOException("Expected exception");
  final TestUnboundedSource<String> source =
      TestUnboundedSource.<String>createBuilder().addException(failure).build();
  final UnboundedSourceSystem.Consumer<String, TestCheckpointMark> consumer =
      createConsumer(source);
  consumer.register(DEFAULT_SSP, null);
  consumer.start();
  expectWrappedException(
      failure,
      () -> consumeUntilTimeoutOrWatermark(consumer, DEFAULT_SSP, DEFAULT_TIMEOUT_MILLIS));
  consumer.stop();
}
@Test
public void testReaderThrowsAtAdvance() throws Exception {
  // Several good elements first, then the reader throws while advancing.
  final IOException failure = new IOException("Expected exception");
  final TestUnboundedSource<String> source =
      TestUnboundedSource.<String>createBuilder()
          .addElements("test", "a", "few", "good", "messages", "then", "...")
          .addException(failure)
          .build();
  final UnboundedSourceSystem.Consumer<String, TestCheckpointMark> consumer =
      createConsumer(source);
  consumer.register(DEFAULT_SSP, offset(0));
  consumer.start();
  expectWrappedException(
      failure,
      () -> consumeUntilTimeoutOrWatermark(consumer, DEFAULT_SSP, DEFAULT_TIMEOUT_MILLIS));
  consumer.stop();
}
@Test
public void testTimeout() throws Exception {
  final CountDownLatch releaseLatch = new CountDownLatch(1);
  final Instant now = Instant.now();
  final Instant nowPlusOne = now.plus(1);
  // The latch blocks the source between "before" and "after", so the first
  // consume call must time out with only the first element.
  final TestUnboundedSource<String> source =
      TestUnboundedSource.<String>createBuilder()
          .setTimestamp(now)
          .addElements("before")
          .addLatch(releaseLatch)
          .setTimestamp(nowPlusOne)
          .addElements("after")
          .advanceWatermarkTo(nowPlusOne)
          .build();
  final UnboundedSourceSystem.Consumer<String, TestCheckpointMark> consumer =
      createConsumer(source);
  consumer.register(DEFAULT_SSP, null);
  consumer.start();
  assertEquals(
      Collections.singletonList(createElementMessage(DEFAULT_SSP, offset(0), "before", now)),
      consumeUntilTimeoutOrWatermark(consumer, DEFAULT_SSP, DEFAULT_TIMEOUT_MILLIS));
  // Release the source; the rest of the stream should now arrive.
  releaseLatch.countDown();
  final List<IncomingMessageEnvelope> remainder =
      Arrays.asList(
          createElementMessage(DEFAULT_SSP, offset(1), "after", nowPlusOne),
          createWatermarkMessage(DEFAULT_SSP, nowPlusOne));
  assertEquals(
      remainder, consumeUntilTimeoutOrWatermark(consumer, DEFAULT_SSP, DEFAULT_TIMEOUT_MILLIS));
  consumer.stop();
}
@Test
public void testRestartFromCheckpoint() throws IOException, InterruptedException {
  final SplittableBuilder<String> builder = TestUnboundedSource.<String>createSplits(3);
  builder.forSplit(0).addElements("split-0");
  builder.forSplit(1).addElements("split-1");
  builder.forSplit(2).addElements("split-2");
  final TestUnboundedSource<String> source = builder.build();
  final UnboundedSourceSystem.Consumer<String, TestCheckpointMark> consumer =
      createConsumer(source, 3);
  // Register each partition at a different checkpointed offset; elements must
  // resume one past the registered offset for each split.
  consumer.register(ssp(0), offset(10));
  consumer.register(ssp(1), offset(5));
  consumer.register(ssp(2), offset(8));
  consumer.start();
  assertEquals(
      Collections.singletonList(
          createElementMessage(ssp(0), offset(11), "split-0", BoundedWindow.TIMESTAMP_MIN_VALUE)),
      consumeUntilTimeoutOrWatermark(consumer, ssp(0), DEFAULT_TIMEOUT_MILLIS));
  assertEquals(
      Collections.singletonList(
          createElementMessage(ssp(1), offset(6), "split-1", BoundedWindow.TIMESTAMP_MIN_VALUE)),
      consumeUntilTimeoutOrWatermark(consumer, ssp(1), DEFAULT_TIMEOUT_MILLIS));
  assertEquals(
      Collections.singletonList(
          createElementMessage(ssp(2), offset(9), "split-2", BoundedWindow.TIMESTAMP_MIN_VALUE)),
      consumeUntilTimeoutOrWatermark(consumer, ssp(2), DEFAULT_TIMEOUT_MILLIS));
  consumer.stop();
}
/** Creates a consumer for {@code source} with a single split. */
private static UnboundedSourceSystem.Consumer<String, TestCheckpointMark> createConsumer(
    TestUnboundedSource<String> source) {
  final int singleSplit = 1;
  return createConsumer(source, singleSplit);
}
/**
 * Creates a consumer for {@code source} configured with {@code splitNum} splits,
 * a zero watermark interval (emit immediately), and a fresh metrics registry.
 */
private static UnboundedSourceSystem.Consumer<String, TestCheckpointMark> createConsumer(
    TestUnboundedSource<String> source, int splitNum) {
  final SamzaPipelineOptions options = PipelineOptionsFactory.as(SamzaPipelineOptions.class);
  options.setWatermarkInterval(0L); // emit immediately
  options.setMaxSourceParallelism(splitNum);
  return new UnboundedSourceSystem.Consumer<>(
      source, options, new SamzaMetricsContainer(new MetricsRegistryMap()), "test-step");
}
/**
 * Polls {@code consumer} repeatedly until either a watermark message is received or
 * {@code timeoutMillis} wall-clock milliseconds have elapsed, accumulating every
 * envelope seen along the way.
 *
 * @param consumer the consumer to poll
 * @param ssp the partition to poll
 * @param timeoutMillis overall deadline; must be non-negative
 * @return all envelopes received, ending with a watermark if one arrived in time
 * @throws InterruptedException if interrupted while polling
 */
private static List<IncomingMessageEnvelope> consumeUntilTimeoutOrWatermark(
    SystemConsumer consumer, SystemStreamPartition ssp, long timeoutMillis)
    throws InterruptedException {
  assertTrue("Expected timeoutMillis (" + timeoutMillis + ") >= 0", timeoutMillis >= 0);
  final List<IncomingMessageEnvelope> accumulator = new ArrayList<>();
  final long start = System.currentTimeMillis();
  long now = start;
  while (timeoutMillis + start >= now) {
    // Poll for at most the time remaining before the deadline. The previous
    // expression (now - start - timeoutMillis) was the negation of this value,
    // so every poll was handed a non-positive timeout and the loop busy-spun.
    accumulator.addAll(pollOnce(consumer, ssp, timeoutMillis - (now - start)));
    // Stop as soon as the most recent envelope is a watermark.
    if (!accumulator.isEmpty()
        && MessageType.of(accumulator.get(accumulator.size() - 1).getMessage())
            == MessageType.WATERMARK) {
      break;
    }
    now = System.currentTimeMillis();
  }
  return accumulator;
}
/** Extracts the {@link OpMessage.Type} of the envelope's payload. */
private static OpMessage.Type getMessageType(IncomingMessageEnvelope envelope) {
  final OpMessage message = (OpMessage) envelope.getMessage();
  return message.getType();
}
/**
 * Performs a single poll of {@code consumer} for {@code ssp}, asserting that the
 * result map contains exactly that partition with a non-null envelope list.
 */
private static List<IncomingMessageEnvelope> pollOnce(
    SystemConsumer consumer, SystemStreamPartition ssp, long timeoutMillis)
    throws InterruptedException {
  final Set<SystemStreamPartition> requested = Collections.singleton(ssp);
  final Map<SystemStreamPartition, List<IncomingMessageEnvelope>> polled =
      consumer.poll(requested, timeoutMillis);
  assertEquals(requested, polled.keySet());
  final List<IncomingMessageEnvelope> envelopes = polled.get(ssp);
  assertNotNull(envelopes);
  return envelopes;
}
/** Encodes a checkpoint mark at {@code offset} as the Base64 string Samza expects. */
private static String offset(int offset) throws IOException {
  final ByteArrayOutputStream encoded = new ByteArrayOutputStream();
  CHECKPOINT_MARK_CODER.encode(TestCheckpointMark.of(offset), encoded);
  return Base64.getEncoder().encodeToString(encoded.toByteArray());
}
/** Builds the default-system stream partition for the given partition index. */
private static SystemStreamPartition ssp(int partition) {
  final Partition part = new Partition(partition);
  return new SystemStreamPartition("default-system", "default-system", part);
}
}
| |
package kiwi.test.service.query;
import static org.norecess.antlr.Assert.assertTree;
import kiwi.service.query.kwql.parser.KWQL;
import org.junit.Before;
import org.junit.Test;
import org.norecess.antlr.ANTLRTester;
/**
* The Class KWQLParseTest.
*/
public class KWQLParseTest {
private ANTLRTester myTester;
@Before
public void setUp() {
  // Fresh tester (and hence fresh parser state) for every test method.
  final IANTLRFrontEntImpl frontEnd = new IANTLRFrontEntImpl();
  myTester = new ANTLRTester(frontEnd);
}
/** Checks parse trees for plain value queries combined with AND, OR, NOT and grouping. */
@Test
public void testValues() {
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))))",
      myTester.scanInput("a").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(OR(STRING(a))(STRING(b))))))))",
      myTester.scanInput("a OR b").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(AND(STRING(a))(STRING(b))))))))",
      myTester.scanInput("a AND b").parseAs("kwqlrule"));
  // Redundant parentheses do not change the tree.
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(AND(STRING(a))(STRING(b))))))))",
      myTester.scanInput("(a AND b)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(STRING(a))))))))",
      myTester.scanInput("NOT a").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(STRING(a))))))))",
      myTester.scanInput("NOT (a)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(OR(STRING(a))(STRING(b)))))))))",
      myTester.scanInput("NOT (a OR b)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)"
          + "(OR(STRING(a))(AND(STRING(b))(STRING(c)))))))))",
      myTester.scanInput("a OR (b AND c)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(OR(STRING(a))"
          + "(AND(STRING(b))(STRING(c)))))))))",
      myTester.scanInput("(a OR (b AND c))").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(OR(STRING(a))"
          + "(AND(STRING(b))(NOT(STRING(c))))))))))",
      myTester.scanInput("(a OR (b AND NOT c))").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)"
          + "(NOT(OR(STRING(a))(AND(STRING(b))(NOT(STRING(c)))))))))))",
      myTester.scanInput("(NOT (a OR (b AND NOT c)))").parseAs("kwqlrule"));
}
/** Checks parse trees for structured resource/qualifier queries. */
@Test
public void testQueries() {
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(author))(STRING(Mary)))))))",
      myTester.scanInput("ci(author:Mary)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(author))(NOT(STRING(name))))))))",
      myTester.scanInput("ci(author:NOT name)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(RESOURCE(TYPE(link))(AND(QUALIFIER(LABEL(target))"
          + "(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(title))(STRING(bla)))))(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(name))(STRING(bla))))))))))",
      myTester.scanInput("ci(link(target:ci(title:bla) tag(name:bla)))").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(RESOURCE(TYPE(fragment))"
          + "(QUALIFIER(LABEL(text))(STRING(Java))))))))",
      myTester.scanInput("ci(fragment(text:Java))").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(author))(STRING(\"Mary\"))))))))",
      myTester.scanInput("ci(tag(author:\"Mary\"))").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(name))(STRING(Java))))(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(name))(STRING(XML)))))))))",
      myTester.scanInput("ci(tag(name:Java) tag(name:XML))").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(name))(STRING(Java))))(QUALIFIER(LABEL(child))(RESOURCE(TYPE(ci))"
          + "(QUALIFIER(LABEL(author))(STRING(XML))))))))))",
      myTester.scanInput("ci(tag(name:Java) child:ci(author:XML))").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(OR(QUALIFIER(LABEL(author))(STRING(Mary)))"
          + "(RESOURCE(TYPE(tag))(QUALIFIER(LABEL(name))(STRING(Java)))))))))",
      myTester.scanInput("author:Mary OR tag(name:Java)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(text))(AND(STRING(Java))"
          + "(NOT(STRING(XML)))))))))",
      myTester.scanInput("ci(text:(Java AND NOT XML))").parseAs("kwqlrule"));
  // A backslash escapes a reserved word so it is treated as a plain string.
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(name))(STRING(AND))))))))",
      myTester.scanInput("ci(tag(name:\\AND))").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(QUALIFIER(LABEL(author))"
          + "(VAR(A)))(QUALIFIER(LABEL(title))(VAR(T))))))))",
      myTester.scanInput("ci(author:$A title:$T)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(name))(STRING(a))(VAR(s)))))))",
      myTester.scanInput("ci(name:a -> $s)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(name))"
          + "(OR(STRING(a))(STRING(b)))(VAR(s)))))))",
      myTester.scanInput("ci(name:a OR b -> $s)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(name))"
          + "(OR(STRING(a))(STRING(b)))(VAR(s)))))))",
      myTester.scanInput("ci(name:(a OR b) -> $s)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL)(NOT(OR(STRING(a))(STRING(b)))))))))",
      myTester.scanInput("ci(NOT (a OR b))").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(AND(QUALIFIER(LABEL(text))"
          + "(STRING(Java)))(QUALIFIER(LABEL(author))(VAR(X))))(OPTIONAL(aa)(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(name))(VAR(Y))))))))))",
      myTester.scanInput("ci(text:Java author:$X OPTIONAL(aa) tag(name:$Y))").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(QUALIFIER(LABEL(author))(VAR(x)))"
          + "(QUALIFIER(LABEL)(OR(STRING(bla))(STRING(blub)))))))))",
      myTester.scanInput("ci(author:$x AND bla OR blub)").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(OR(AND(QUALIFIER(LABEL(author))(VAR(x)))"
          + "(QUALIFIER(LABEL)(STRING(bla))))(QUALIFIER(LABEL)(STRING(blub))))))))",
      myTester.scanInput("ci((author:$x AND bla) OR blub) ").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL(author))(STRING(\"a b\")))))))",
      myTester.scanInput("author:\"a b\"").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(AND(QUALIFIER(LABEL(author))(STRING(a)))"
          + "(QUALIFIER(LABEL)(STRING(b))))))))",
      myTester.scanInput("author:a b").parseAs("kwqlrule"));
  assertTree(KWQL.RULE,
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL(author))(AND(STRING(a))(STRING(b))))))))",
      myTester.scanInput("author:(a b)").parseAs("kwqlrule"));
}
/** Regression check: a head with a selection query over two bodies parses correctly. */
@Test
public void formerlyFaultyQuery() {
  final String expected =
      "(RULE(HEAD(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(title))(STRING(a)))))"
          + "(SEL_QUERY(AND(CI_BODY(RESOURCE(TYPE(ci))(AND(QUALIFIER(LABEL(author))"
          + "(NOT(STRING(name))))(QUALIFIER(LABEL)(STRING(bla))))))(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)"
          + "(STRING(bla))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:a)@ci(author:NOT name bla) bla").parseAs("kwqlrule"));
}
/** A URI value inside a tag qualifier parses as a UR node. */
@Test
public void tagUri() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(URI))(UR(http://www.bla.com))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(tag(URI:http://www.bla.com))").parseAs("kwqlrule"));
}
/** A URI containing a port and path still parses as a single UR node. */
@Test
public void test7() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(URI))(UR(http://www.bla.com:8080/wusch))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(tag(URI:http://www.bla.com:8080/wusch))").parseAs("kwqlrule"));
}
// The tree comparison is expected to fail here; the test passes on AssertionError.
@Test(expected = AssertionError.class)
public void test5() {
  final String unexpected = "(RULE(HEAD)(CI_BODY(RESOURCE(TYPE(ci)))))";
  assertTree(KWQL.RULE, unexpected, myTester.scanInput("ci()").parseAs("kwqlrule"));
}
/** Two whitespace-separated values inside a resource combine with an implicit AND. */
@Test
public void test6() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(QUALIFIER(LABEL(author))"
          + "(STRING(a)))(QUALIFIER(LABEL)(STRING(b))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("ci(author:a b)").parseAs("kwqlrule"));
}
/** NOT applied to a nested resource negates the whole resource subtree. */
@Test
public void test8() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(NOT(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(name))(STRING(bla)))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("ci(NOT(tag(name:bla)))").parseAs("kwqlrule"));
}
/** A numeric qualifier value parses as an INTEGER node. */
@Test
public void test9() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(numberEd))(INTEGER(4)))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("ci(numberEd:4)").parseAs("kwqlrule"));
}
/** A qualifier and a nested link resource inside one ci combine with AND. */
@Test
public void test10() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(QUALIFIER(LABEL(title))"
          + "(STRING(StartPage)))(RESOURCE(TYPE(link))(QUALIFIER(LABEL(target))(RESOURCE(TYPE(ci))"
          + "(QUALIFIER(LABEL(title))(STRING(Liste)))))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:StartPage link(target:ci(title:Liste)))").parseAs("kwqlrule"));
}
/** Three siblings nest as left-associative ANDs; child: takes a fragment resource. */
@Test
public void test11() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(AND(QUALIFIER(LABEL(title))"
          + "(STRING(bla)))(QUALIFIER(LABEL(child))(RESOURCE(TYPE(fragment))(QUALIFIER(LABEL(text))"
          + "(STRING(blub))))))(QUALIFIER(LABEL(title))(STRING(bla))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:bla child:fragment(text:blub) title:bla)").parseAs("kwqlrule"));
}
/** Same shape as test11, but the child qualifier holds a ci resource. */
@Test
public void test12() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(AND(QUALIFIER(LABEL(title))"
          + "(STRING(bla)))(QUALIFIER(LABEL(child))(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(text))(STRING(blub))))))"
          + "(QUALIFIER(LABEL(title))(STRING(bla))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:bla child:ci(text:blub) title:bla)").parseAs("kwqlrule"));
}
/** Qualifiers inside the nested ci stay inside that ci's subtree. */
@Test
public void test13() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(QUALIFIER(LABEL(title))"
          + "(STRING(bla)))(QUALIFIER(LABEL(child))(RESOURCE(TYPE(ci))(AND(QUALIFIER(LABEL(text))"
          + "(STRING(blub)))(QUALIFIER(LABEL(title))(STRING(bla)))))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:bla child:ci(text:blub title:bla))").parseAs("kwqlrule"));
}
/** A bare value after child: becomes an untyped resource with an unlabeled qualifier. */
@Test
public void test14() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(QUALIFIER(LABEL(title))"
          + "(STRING(bla)))(QUALIFIER(LABEL(child))(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a))))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("ci(title:bla child:a)").parseAs("kwqlrule"));
}
/** A trailing bare value attaches to the outer ci, not to the child qualifier. */
@Test
public void test15() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(AND(QUALIFIER(LABEL(title))"
          + "(STRING(bla)))(QUALIFIER(LABEL(child))(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a))))))"
          + "(QUALIFIER(LABEL)(STRING(b))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:bla child:a b)").parseAs("kwqlrule"));
}
/** Parenthesized values after child: group inside that qualifier's resource. */
@Test
public void test16() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(AND(QUALIFIER(LABEL(title))(STRING(bla)))"
          + "(QUALIFIER(LABEL(child))(RESOURCE(TYPE)(QUALIFIER(LABEL)(AND(STRING(a))(STRING(b)))))))"
          + "(QUALIFIER(LABEL)(STRING(c))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:bla child:(a b) c)").parseAs("kwqlrule"));
}
/** A chained qualifier (child:text:b) nests a labeled qualifier in an untyped resource. */
@Test
public void test17() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(QUALIFIER(LABEL(title))"
          + "(STRING(bla)))(QUALIFIER(LABEL(child))(RESOURCE(TYPE)(QUALIFIER(LABEL(text))(STRING(b))))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:bla child:text:b)").parseAs("kwqlrule"));
}
/** An OR of two ci resources inside a link target keeps both alternatives. */
@Test
public void test18() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(QUALIFIER(LABEL(title))(STRING(StartPage)))"
          + "(RESOURCE(TYPE(link))(QUALIFIER(LABEL(target))(OR(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(title))"
          + "(STRING(Orphan))))(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(title))(STRING(Liste))))))))))))";
  assertTree(KWQL.RULE, expected,
      myTester
          .scanInput("ci(title:StartPage link(target:(ci(title:Orphan) OR ci(title:Liste))))")
          .parseAs("kwqlrule"));
}
/** Colons inside a quoted string are preserved verbatim. */
@Test
public void test19() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL)(STRING(\"bla:wusch:::blawusch\")))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(\"bla:wusch:::blawusch\")").parseAs("kwqlrule"));
}
/** An ISO-style timestamp value parses as a DATE node. */
@Test
public void test20() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))"
          + "(QUALIFIER(LABEL(created))(DATE(2009-09-23T14:42:11Z)))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(created:2009-09-23T14:42:11Z)").parseAs("kwqlrule"));
}
/** Single-quoted values keep their quotes in the STRING node. */
@Test
public void test21() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(title))(STRING('bla')))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("ci(title:'bla')").parseAs("kwqlrule"));
}
/** OR of two top-level ci resources. */
@Test
public void test22() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(OR(RESOURCE(TYPE(ci))(QUALIFIER(LABEL)(STRING(a))))"
          + "(RESOURCE(TYPE(ci))(QUALIFIER(LABEL)(STRING(s))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("ci(a) OR ci(s)").parseAs("kwqlrule"));
}
/** A top-level tag on the right of OR is wrapped in an untyped resource. */
@Test
public void test23() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(OR(RESOURCE(TYPE(ci))(QUALIFIER(LABEL)(STRING(a))))"
          + "(RESOURCE(TYPE)(RESOURCE(TYPE(tag))(QUALIFIER(LABEL)(STRING(s)))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("ci(a) OR tag(s)").parseAs("kwqlrule"));
}
/** AND of two top-level tags, both inside one untyped resource. */
@Test
public void test24() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(AND(RESOURCE(TYPE(tag))(QUALIFIER(LABEL)(STRING(a))))(RESOURCE(TYPE(tag))(QUALIFIER(LABEL)(STRING(s)))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("tag(a) AND tag(s)").parseAs("kwqlrule"));
}
/** OR of two top-level tags, both inside one untyped resource. */
@Test
public void test25() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(OR(RESOURCE(TYPE(tag))(QUALIFIER(LABEL)(STRING(a))))"
          + "(RESOURCE(TYPE(tag))(QUALIFIER(LABEL)(STRING(s)))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("tag(a) OR tag(s)").parseAs("kwqlrule"));
}
/** Same query as test22; the OR lives directly inside the CI_BODY. */
@Test
public void test26() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(OR(RESOURCE(TYPE(ci))(QUALIFIER(LABEL)(STRING(a))))(RESOURCE(TYPE(ci))"
          + "(QUALIFIER(LABEL)(STRING(s))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("ci(a) OR ci(s)").parseAs("kwqlrule"));
}
/** Selection query (head@body): AND of a ci body and a tag body. */
@Test()
public void test23a() {
  final String expected =
      "(RULE(HEAD(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(title))(STRING(a)))))"
          + "(SEL_QUERY(AND(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL)(STRING(a)))))"
          + "(CI_BODY(RESOURCE(TYPE)(RESOURCE(TYPE(tag))(QUALIFIER(LABEL)(STRING(s)))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:a)@ci(a) AND tag(s)").parseAs("kwqlrule"));
}
/** Selection query: AND of two ci bodies. */
@Test()
public void test24a() {
  final String expected =
      "(RULE(HEAD(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(title))(STRING(a)))))"
          + "(SEL_QUERY(AND(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL)(STRING(a)))))"
          + "(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL)(STRING(s))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:a)@ci(a) AND ci(s)").parseAs("kwqlrule"));
}
/** Selection query: a tag body on the left combined with a ci body. */
@Test()
public void test25a() {
  final String expected =
      "(RULE(HEAD(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(title))(STRING(a)))))"
          + "(SEL_QUERY(AND(CI_BODY(RESOURCE(TYPE)(RESOURCE(TYPE(tag))(QUALIFIER(LABEL)(STRING(a))))))"
          + "(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL)(STRING(s))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:a)@tag(a) AND ci(s)").parseAs("kwqlrule"));
}
/** NOT on a bare value negates just the STRING node. */
@Test
public void test27() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(STRING(a))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("NOT(a)").parseAs("kwqlrule"));
}
/** NOT on a labeled qualifier negates the whole qualifier. */
@Test
public void test28() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(NOT(QUALIFIER(LABEL(title))(STRING(a))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("NOT(title:a)").parseAs("kwqlrule"));
}
/** NOT on a full ci resource negates the resource subtree. */
@Test
public void test29() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(NOT(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(title))(STRING(a))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("NOT(ci(title:a))").parseAs("kwqlrule"));
}
/** NOT on a tag resource: the negation sits inside the enclosing untyped resource. */
@Test
public void test30() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(NOT(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(title))(STRING(a)))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("NOT(tag(title:a))").parseAs("kwqlrule"));
}
/** NOT distributes over an OR of a tag resource and a qualifier. */
@Test
public void test31() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(NOT(OR(RESOURCE(TYPE(tag))(QUALIFIER(LABEL(name))"
          + "(STRING(a))))(QUALIFIER(LABEL(title))(STRING(a)))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("NOT(tag(name:a) OR title:a)").parseAs("kwqlrule"));
}
/** NOT over an AND of two tag resources. */
@Test
public void test32() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(NOT(AND(RESOURCE(TYPE(tag))(QUALIFIER(LABEL(name))"
          + "(STRING(a))))(RESOURCE(TYPE(tag))(QUALIFIER(LABEL(name))(STRING(b))))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("NOT(tag(name:a) AND tag(name:b))").parseAs("kwqlrule"));
}
/** Nested NOTs over ANDs of tag resources keep their nesting in the tree. */
@Test
public void test33() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(NOT(AND(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL)(STRING(a))))(NOT(AND(RESOURCE(TYPE(tag))(QUALIFIER(LABEL)(STRING(b))))"
          + "(RESOURCE(TYPE(tag))(QUALIFIER(LABEL)(STRING(c))))))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("NOT(tag(a) AND NOT(tag(b) AND tag(c)))").parseAs("kwqlrule"));
}
/** Selection query: the body AND combines a ci body and a bare-value body. */
@Test
public void test39() {
  final String expected =
      "(RULE(HEAD(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(title))(STRING(a)))))"
          + "(SEL_QUERY(AND(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(author))(STRING(Mary)))))"
          + "(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(XML))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(title:a)@ci(author:Mary) AND XML").parseAs("kwqlrule"));
}
/** Double negation of a resource is rejected by the parser (AssertionError expected). */
@Test(expected = AssertionError.class)
public void faultyQuery3() {
  myTester.scanInput("NOT(NOT(ci(a)))").parseAs("kwqlrule");
}
/** A ci resource is not a valid value for the title qualifier (AssertionError expected). */
@Test(expected = AssertionError.class)
public void faultyQuery4() {
  myTester.scanInput("ci(title:ci(a))").parseAs("kwqlrule");
}
/** Backslash-escaping lets the reserved word "fragment" be used as a plain value. */
@Test
public void test34() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE)(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(name))(STRING(fragment))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("tag(name:\\fragment)").parseAs("kwqlrule"));
}
/** OPTIONAL with an identifier argument wraps the following tag resource. */
@Test
public void test35() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(AND(AND(QUALIFIER(LABEL(text))"
          + "(STRING(Java)))(QUALIFIER(LABEL(author))(VAR(X))))(OPTIONAL(s)(RESOURCE(TYPE(tag))"
          + "(QUALIFIER(LABEL(name))(VAR(Y))))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(text:Java author:$X OPTIONAL(s) tag(name:$Y))").parseAs("kwqlrule"));
}
/** NOT over a mixed AND of a qualifier and a tag resource inside a ci. */
@Test
public void test36() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_BODY(RESOURCE(TYPE(ci))(NOT(AND(QUALIFIER(LABEL(title))"
          + "(STRING(bla)))(RESOURCE(TYPE(tag))(QUALIFIER(LABEL(name))(STRING(blub))))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(NOT(title:bla AND tag(name:blub)))").parseAs("kwqlrule"));
}
/** A BOOLEAN(...) clause turns the query into a CI_COMP with a BOOL_BODY. */
@Test
public void test37() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE(ci))(NOT(AND(QUALIFIER(LABEL(title))"
          + "(STRING(bla)))(RESOURCE(TYPE(tag))(QUALIFIER(LABEL(name))(STRING(blub))))))))"
          + "(BOOL_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL)(STRING(b))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("ci(NOT(title:bla AND tag(name:blub))) BOOLEAN(ci(b))").parseAs("kwqlrule"));
}
/** Parenthesizing the whole CI_COMP (as in test37) does not change the tree. */
@Test
public void test38() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE(ci))(NOT(AND(QUALIFIER(LABEL(title))(STRING(bla)))"
          + "(RESOURCE(TYPE(tag))(QUALIFIER(LABEL(name))(STRING(blub))))))))(BOOL_BODY(RESOURCE(TYPE(ci))"
          + "(QUALIFIER(LABEL)(STRING(b))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("(ci(NOT(title:bla AND tag(name:blub))) BOOLEAN(ci(b)))").parseAs("kwqlrule"));
}
/** A value ANDed with a BOOLEAN clause forms a CI_COMP of CI_BODY and BOOL_BODY. */
@Test
public void globalTest1() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
          + "(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(b))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("a AND BOOLEAN(b)").parseAs("kwqlrule"));
}
/** NOT inside the BOOLEAN argument negates the value within the BOOL_BODY. */
@Test
public void globalTest2() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)"
          + "(STRING(a)))))(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(STRING(b)))))))))";
  assertTree(KWQL.RULE, expected, myTester.scanInput("a AND BOOLEAN(NOT b)").parseAs("kwqlrule"));
}
/** NOT before BOOLEAN negates the whole BOOL_BODY resource. */
@Test
public void globalTest3() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
          + "(BOOL_BODY(NOT(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(STRING(b))))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("a AND NOT BOOLEAN(NOT b)").parseAs("kwqlrule"));
}
/** Two BOOLEAN clauses combined with OR inside one BOOL_BODY. */
@Test
public void globalTest4() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
          + "(BOOL_BODY(OR(NOT(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(STRING(b))))))(RESOURCE(TYPE)"
          + "(QUALIFIER(LABEL)(STRING(c)))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("a AND NOT BOOLEAN(NOT b) OR BOOLEAN(c)").parseAs("kwqlrule"));
}
/** Juxtaposed BOOLEAN clauses combine with AND inside the BOOL_BODY. */
@Test
public void globalTest5() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
          + "(BOOL_BODY(AND(NOT(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(STRING(b))))))(RESOURCE(TYPE)"
          + "(QUALIFIER(LABEL)(STRING(c)))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("a AND NOT BOOLEAN(NOT b) BOOLEAN(c)").parseAs("kwqlrule"));
}
/** NOT over a parenthesized OR of two BOOLEAN clauses. */
@Test
public void globalTest6() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
          + "(BOOL_BODY(NOT(OR(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(STRING(b)))))(RESOURCE(TYPE)"
          + "(QUALIFIER(LABEL)(STRING(c))))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("a AND NOT (BOOLEAN(NOT b) OR BOOLEAN(c))").parseAs("kwqlrule"));
}
/** A negated OR group ANDed with a further BOOLEAN clause. */
@Test
public void globalTest7() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
          + "(BOOL_BODY(AND(NOT(OR(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(STRING(b)))))"
          + "(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(c))))))(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(f)))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("a AND NOT (BOOLEAN(NOT b) OR BOOLEAN(c)) AND BOOLEAN(f)").parseAs("kwqlrule"));
}
/** BOOLEAN clauses may precede the CI body; both land in the BOOL_BODY. */
@Test
public void globalTest8() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
          + "(BOOL_BODY(NOT(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(STRING(b))))))(RESOURCE(TYPE)"
          + "(QUALIFIER(LABEL)(STRING(b))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("NOT BOOLEAN(NOT b) a AND BOOLEAN(b)").parseAs("kwqlrule"));
}
/** BOOLEAN clauses on both sides of the CI body collect in one BOOL_BODY. */
@Test
public void globalTest9() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
          + "(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(c))))(RESOURCE(TYPE)"
          + "(QUALIFIER(LABEL)(STRING(r))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("BOOLEAN(c) AND a AND BOOLEAN(r)").parseAs("kwqlrule"));
}
/** A leading negated BOOLEAN and a trailing BOOLEAN both join the BOOL_BODY. */
@Test
public void globalTest10() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
          + "(BOOL_BODY(NOT(RESOURCE(TYPE)(QUALIFIER(LABEL)(NOT(STRING(b))))))(RESOURCE(TYPE)"
          + "(QUALIFIER(LABEL)(STRING(r))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("NOT BOOLEAN(NOT b) AND a AND BOOLEAN(r)").parseAs("kwqlrule"));
}
/** A BOOLEAN outside a parenthesized OR of CI_COMPs attaches via AND. */
@Test
public void globalTest11() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(AND(OR(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
          + "(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(b))))))(CI_COMP(CI_BODY(RESOURCE(TYPE)"
          + "(QUALIFIER(LABEL)(STRING(b)))))(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))))"
          + "(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(d))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("(a BOOLEAN(b) OR b BOOLEAN(a)) BOOLEAN(d)").parseAs("kwqlrule"));
}
/** Three CI_COMPs chained with OR, left-associative. */
@Test
public void globalTest12() {
  final String expected =
      "(RULE(HEAD)(CI_QUERY(OR(OR(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
          + "(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(b))))))(CI_COMP(CI_BODY(RESOURCE(TYPE)"
          + "(QUALIFIER(LABEL)(STRING(d)))))(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(c)))))))"
          + "(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(h)))))(BOOL_BODY(RESOURCE(TYPE)"
          + "(QUALIFIER(LABEL)(STRING(f)))))))))";
  assertTree(KWQL.RULE, expected,
      myTester.scanInput("a BOOLEAN(b) OR d BOOLEAN(c) OR (BOOLEAN(f) h)").parseAs("kwqlrule"));
}
@Test
public void globalTest13() {
    // Parenthesised OR conjoined with a qualifier, then OR-ed with a bare keyword.
    final String query = "(a BOOLEAN(b) OR b BOOLEAN(c)) BOOLEAN(d) OR d";
    final String expectedTree =
        "(RULE(HEAD)(CI_QUERY(OR(AND(OR(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
            + "(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(b))))))(CI_COMP(CI_BODY(RESOURCE(TYPE)"
            + "(QUALIFIER(LABEL)(STRING(b)))))(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(c)))))))"
            + "(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(d))))))(CI_BODY(RESOURCE(TYPE)"
            + "(QUALIFIER(LABEL)(STRING(d))))))))";
    assertTree(KWQL.RULE, expectedTree, myTester.scanInput(query).parseAs("kwqlrule"));
}
@Test
public void globalTest14() {
    // The BOOLEAN qualifier precedes the parenthesised OR; the tree shape is the same as if it trailed.
    final String query = "BOOLEAN(dd)(a BOOLEAN(b) OR b BOOLEAN(c))";
    final String expectedTree =
        "(RULE(HEAD)(CI_QUERY(AND(OR(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
            + "(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(b))))))(CI_COMP(CI_BODY(RESOURCE(TYPE)"
            + "(QUALIFIER(LABEL)(STRING(b)))))(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(c)))))))"
            + "(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(dd))))))))";
    assertTree(KWQL.RULE, expectedTree, myTester.scanInput(query).parseAs("kwqlrule"));
}
@Test
public void globalTest15() {
    // Typed resources with explicit labels and a shared variable ($n) across body and qualifier.
    final String query = "ci(text:$n) BOOLEAN(ci(title:$n tag(name:KiWi)))";
    final String expectedTree =
        "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE(ci))(QUALIFIER(LABEL(text))(VAR(n)))))"
            + "(BOOL_BODY(RESOURCE(TYPE(ci))(AND(QUALIFIER(LABEL(title))(VAR(n)))(RESOURCE(TYPE(tag))"
            + "(QUALIFIER(LABEL(name))(STRING(KiWi))))))))))";
    assertTree(KWQL.RULE, expectedTree, myTester.scanInput(query).parseAs("kwqlrule"));
}
@Test
public void globalTest16() {
    // Two leading BOOLEAN qualifiers are AND-ed together inside a single BOOL_BODY.
    final String query = "BOOLEAN(dsd) BOOLEAN(dd)(a BOOLEAN(b) OR b BOOLEAN(c))";
    final String expectedTree =
        "(RULE(HEAD)(CI_QUERY(AND(OR(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
            + "(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(b))))))(CI_COMP(CI_BODY(RESOURCE(TYPE)"
            + "(QUALIFIER(LABEL)(STRING(b)))))(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(c)))))))"
            + "(BOOL_BODY(AND(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(dsd))))(RESOURCE(TYPE)(QUALIFIER(LABEL)"
            + "(STRING(dd)))))))))";
    assertTree(KWQL.RULE, expectedTree, myTester.scanInput(query).parseAs("kwqlrule"));
}
@Test
public void globalTest17() {
    // Parenthesised OR of two strings collapses into an OR inside a single qualifier label.
    final String query = "(a OR c) AND BOOLEAN(g)";
    final String expectedTree =
        "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(OR(STRING(a))"
            + "(STRING(c))))))(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(g))))))))";
    assertTree(KWQL.RULE, expectedTree, myTester.scanInput(query).parseAs("kwqlrule"));
}
@Test
public void globalTest18() {
    // Parenthesising the AND keeps the OR at the top of the tree (contrast with globalTest19).
    final String query = "a OR (c AND BOOLEAN(g))";
    final String expectedTree =
        "(RULE(HEAD)(CI_QUERY(OR(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(a)))))"
            + "(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(c)))))(BOOL_BODY(RESOURCE(TYPE)"
            + "(QUALIFIER(LABEL)(STRING(g)))))))))";
    assertTree(KWQL.RULE, expectedTree, myTester.scanInput(query).parseAs("kwqlrule"));
}
@Test
public void globalTest19() {
    // Without parentheses the OR binds inside the qualifier label (contrast with globalTest18).
    final String query = "a OR c AND BOOLEAN(g)";
    final String expectedTree =
        "(RULE(HEAD)(CI_QUERY(CI_COMP(CI_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(OR(STRING(a))"
            + "(STRING(c))))))(BOOL_BODY(RESOURCE(TYPE)(QUALIFIER(LABEL)(STRING(g))))))))";
    assertTree(KWQL.RULE, expectedTree, myTester.scanInput(query).parseAs("kwqlrule"));
}
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.syntax;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.skylarkinterface.Param;
import com.google.devtools.build.lib.skylarkinterface.SkylarkSignature;
import java.util.List;
/**
* A helper class containing additional built in functions for Bazel (BUILD files and .bzl files).
*/
public class BazelLibrary {

  // NOTE: the @SkylarkSignature annotations below are read reflectively by
  // SkylarkSignatureProcessor.configureSkylarkFunctions (invoked in the static initializer at
  // the bottom of this class), which binds each annotation to the BuiltinFunction field it
  // decorates. The annotation content and field structure must therefore stay in sync.

  @SkylarkSignature(
    name = "type",
    returnType = String.class,
    doc =
        "Returns the type name of its argument. This is useful for debugging and "
            + "type-checking. Examples:"
            + "<pre class=\"language-python\">"
            + "type(2) == \"int\"\n"
            + "type([1]) == \"list\"\n"
            + "type(struct(a = 2)) == \"struct\""
            + "</pre>"
            + "This function might change in the future. To write Python-compatible code and "
            + "be future-proof, use it only to compare return values: "
            + "<pre class=\"language-python\">"
            + "if type(x) == type([]): # if x is a list"
            + "</pre>",
    parameters = {@Param(name = "x", doc = "The object to check type of.")}
  )
  private static final BuiltinFunction type =
      new BuiltinFunction("type") {
        // Invoked reflectively by the Skylark interpreter via the signature above.
        public String invoke(Object object) {
          // There is no 'type' type in Skylark, so we return a string with the type name.
          String name = EvalUtils.getDataTypeName(object, false);
          // TODO(bazel-team): Temporary change to avoid breaking existing code.
          if (name.equals("depset")) {
            return "set";
          }
          return name;
        }
      };

  @SkylarkSignature(
    name = "depset",
    returnType = SkylarkNestedSet.class,
    doc =
        "Creates a <a href=\"depset.html\">depset</a> from the <code>items</code>. "
            + "The depset supports nesting other depsets of the same element type in it. "
            + "A desired <a href=\"depset.html\">iteration order</a> can also be specified.<br>"
            + "Examples:<br><pre class=\"language-python\">depset([\"a\", \"b\"])\n"
            + "depset([1, 2, 3], order=\"postorder\")</pre>",
    parameters = {
      @Param(
        name = "items",
        type = Object.class,
        defaultValue = "[]",
        doc =
            "The items to initialize the depset with. May contain both standalone items "
                + "and other depsets."
      ),
      @Param(
        name = "order",
        type = String.class,
        defaultValue = "\"default\"",
        doc =
            "The ordering strategy for the depset. Possible values are: <code>default</code> "
                + "(default), <code>postorder</code>, <code>topological</code>, and "
                + "<code>preorder</code>. These are also known by the deprecated names "
                + "<code>stable</code>, <code>compile</code>, <code>link</code> and "
                + "<code>naive_link</code> respectively. An explanation of the values can be found "
                + "<a href=\"depset.html\">here</a>."
      )
    },
    useLocation = true
  )
  private static final BuiltinFunction depset =
      new BuiltinFunction("depset") {
        // An invalid order string surfaces as an EvalException at the call site location.
        public SkylarkNestedSet invoke(Object items, String order, Location loc)
            throws EvalException {
          try {
            return new SkylarkNestedSet(Order.parse(order), items, loc);
          } catch (IllegalArgumentException ex) {
            throw new EvalException(loc, ex);
          }
        }
      };

  @SkylarkSignature(
    name = "set",
    returnType = SkylarkNestedSet.class,
    doc =
        "A temporary alias for <a href=\"#depset\">depset</a>. "
            + "Deprecated in favor of <code>depset</code>.",
    parameters = {
      @Param(
        name = "items",
        type = Object.class,
        defaultValue = "[]",
        doc = "Same as for <a href=\"#depset\">depset</a>."
      ),
      @Param(
        name = "order",
        type = String.class,
        defaultValue = "\"default\"",
        doc = "Same as for <a href=\"#depset\">depset</a>."
      )
    },
    useLocation = true
  )
  private static final BuiltinFunction set =
      new BuiltinFunction("set") {
        // Deprecated alias: intentionally mirrors the 'depset' implementation above.
        public SkylarkNestedSet invoke(Object items, String order, Location loc)
            throws EvalException {
          try {
            return new SkylarkNestedSet(Order.parse(order), items, loc);
          } catch (IllegalArgumentException ex) {
            throw new EvalException(loc, ex);
          }
        }
      };

  @SkylarkSignature(
    name = "union",
    objectType = SkylarkNestedSet.class,
    returnType = SkylarkNestedSet.class,
    doc =
        "Creates a new <a href=\"depset.html\">depset</a> that contains both "
            + "the input depset as well as all additional elements.",
    parameters = {
      @Param(name = "input", type = SkylarkNestedSet.class, doc = "The input depset."),
      @Param(name = "new_elements", type = Object.class, doc = "The elements to be added.")
    },
    useLocation = true
  )
  private static final BuiltinFunction union =
      new BuiltinFunction("union") {
        @SuppressWarnings("unused")
        public SkylarkNestedSet invoke(
            SkylarkNestedSet input, Object newElements, Location loc)
            throws EvalException {
          // newElements' type is Object because of the polymorphism on unioning two
          // SkylarkNestedSets versus a set and another kind of iterable.
          // Can't use EvalUtils#toIterable since that would discard this information.
          return new SkylarkNestedSet(input, newElements, loc);
        }
      };

  /**
   * Returns a function-value implementing "select" (i.e. configurable attributes) in the specified
   * package context.
   */
  @SkylarkSignature(
    name = "select",
    doc = "Creates a SelectorValue from the dict parameter.",
    parameters = {
      @Param(name = "x", type = SkylarkDict.class, doc = "The parameter to convert."),
      @Param(
        name = "no_match_error",
        type = String.class,
        defaultValue = "''",
        doc = "Optional custom error to report if no condition matches."
      )
    }
  )
  private static final BuiltinFunction select =
      new BuiltinFunction("select") {
        public Object invoke(SkylarkDict<?, ?> dict, String noMatchError) throws EvalException {
          return SelectorList.of(new SelectorValue(dict, noMatchError));
        }
      };

  // Builds the global frame exposed to BUILD/.bzl evaluation: constants, the default method
  // library, and the Bazel-specific functions defined above.
  private static Environment.Frame createGlobals() {
    List<BaseFunction> bazelGlobalFunctions =
        ImmutableList.<BaseFunction>of(select, depset, set, type);
    try (Mutability mutability = Mutability.create("BUILD")) {
      Environment env = Environment.builder(mutability).build();
      Runtime.setupConstants(env);
      Runtime.setupMethodEnvironment(env, MethodLibrary.defaultGlobalFunctions);
      Runtime.setupMethodEnvironment(env, bazelGlobalFunctions);
      return env.getGlobals();
    }
  }

  // NOTE(review): static initializers run in textual order, so GLOBALS is built before the
  // static block below calls configureSkylarkFunctions - confirm the BuiltinFunction fields do
  // not need their signatures configured before createGlobals() uses them.
  public static final Environment.Frame GLOBALS = createGlobals();

  static {
    SkylarkSignatureProcessor.configureSkylarkFunctions(BazelLibrary.class);
  }
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.security.impl;
import static io.undertow.UndertowMessages.MESSAGES;
import io.undertow.security.api.SessionNonceManager;
import io.undertow.server.HttpServerExchange;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.concurrent.TimeUnit;
import org.xnio.XnioExecutor;
import org.xnio.XnioExecutor.Key;
import io.undertow.util.FlexBase64;
/**
* A default {@link io.undertow.security.api.NonceManager} implementation to provide reasonable single host management of nonces.
*
* This {@link io.undertow.security.api.NonceManager} manages nonces in two groups, the first is the group that are allocated to new requests, this group
* is a problem as we want to be able to limit how many we distribute so we don't have a DOS storing too many but we also don't
* want a high number of requests to push the other valid nonces out faster than they can be used.
*
* The second group is the set of nonces actively in use - these should be maintained as we can also maintain the nonce count
* and even track the next nonce once invalid.
*
* Maybe group one should be a timestamp and private key hashed together, if used with a nonce count they move to be tracked to
* ensure the same count is not used again - if successfully used without a nonce count add to a blacklist until expiration? A
* nonce used without a nonce count will essentially be single use with each request getting a new nonce.
*
* @author <a href="mailto:darran.lofthouse@jboss.com">Darran Lofthouse</a>
*/
public class SimpleNonceManager implements SessionNonceManager {

    /** Digest algorithm used to protect nonces against forgery when none is specified. */
    private static final String DEFAULT_HASH_ALG = "MD5";

    /** Charset used to convert the embedded timestamp to and from bytes. */
    private static final Charset UTF_8 = Charset.forName("UTF-8");

    /**
     * List of invalid nonces, this list contains the nonces that have been used without a nonce count.
     *
     * In that situation they are considered single use and must not be used again.
     */
    private final Set<String> invalidNonces = Collections.synchronizedSet(new HashSet<String>());

    /**
     * Map of known currently valid nonces.
     *
     * NOTE(review): an earlier comment claimed a SortedMap ordered by creation timestamp, but a
     * plain HashMap is used here; expiry is instead handled by the scheduled KnownNonceCleaner
     * tasks registered when nonces are added.
     */
    private final Map<String, Nonce> knownNonces = Collections.synchronizedMap(new HashMap<String, Nonce>());

    /**
     * A WeakHashMap to map expired nonces to their replacement nonce. For an item to be added to this Collection the value will
     * have been removed from the knownNonces map.
     *
     * A replacement nonce will have been added to knownNonces that references the key used here - once the replacement nonce is
     * removed from knownNonces then the key will be eligible for garbage collection allowing it to be removed from this map as
     * well.
     *
     * The value in this Map is a plain String, this is to avoid inadvertently creating a long term reference to the key we
     * expect to be garbage collected at some point in the future.
     */
    private final Map<NonceHolder, String> forwardMapping = Collections.synchronizedMap(new WeakHashMap<NonceHolder, String>());

    /**
     * A pseudo-random generator for creating the nonces, a secure random is not required here as this is used purely to
     * minimise the chance of collisions should two nonces be generated at exactly the same time.
     */
    private final Random random = new Random();

    // Base64-encoded per-instance secret mixed into every nonce hash; see the constructor.
    private final String secret;
    // Name of the digest algorithm used by createNonce/generateHash.
    private final String hashAlg;
    // Digest length in bytes, used for sanity checks when decoding received nonces.
    private final int hashLength;

    /**
     * After a nonce is issued the first authentication response MUST be received within 5 minutes.
     */
    private final long firstUseTimeOut = 5 * 60 * 1000;

    /**
     * Overall a nonce is valid from 15 minutes from first being issued, if used after this then a new nonce will be issued.
     */
    private final long overallTimeOut = 15 * 60 * 1000;

    /**
     * A previously used nonce will be allowed to remain in the knownNonces list for up to 5 minutes.
     *
     * The nonce will be accepted during this 5 minute window but will immediately be replaced causing any additional requests
     * to be forced to use the new nonce.
     *
     * This is primarily for session based digests where losing the cached session key would be bad.
     */
    private final long cacheTimePostExpiry = 5 * 60 * 1000;

    /** Creates a manager using the default (MD5) digest algorithm. */
    public SimpleNonceManager() {
        this(DEFAULT_HASH_ALG);
    }

    /**
     * Creates a manager using the named digest algorithm and generates a fresh random secret.
     *
     * @param hashAlg the MessageDigest algorithm name; an unsupported name raises immediately
     */
    public SimpleNonceManager(final String hashAlg) {
        // Verify it is a valid algorithm (at least for now)
        MessageDigest digest = getDigest(hashAlg);
        this.hashAlg = hashAlg;
        this.hashLength = digest.getDigestLength();

        // Create a new secret only valid within this NonceManager instance.
        Random rand = new SecureRandom();
        byte[] secretBytes = new byte[32];
        rand.nextBytes(secretBytes);
        secret = FlexBase64.encodeString(digest.digest(secretBytes), false);
    }

    // Obtains a fresh MessageDigest for the configured algorithm (digests are not thread-safe,
    // so a new instance is created per use).
    private MessageDigest getDigest(final String hashAlg) {
        try {
            return MessageDigest.getInstance(hashAlg);
        } catch (NoSuchAlgorithmException e) {
            throw MESSAGES.hashAlgorithmNotFound(hashAlg);
        }
    }

    /**
     * Returns the nonce the client should use next: the supplied nonce if it is still usable,
     * otherwise a replacement (following any forward mappings already recorded).
     *
     * @see io.undertow.security.api.NonceManager#nextNonce(java.lang.String)
     */
    public String nextNonce(String lastNonce, HttpServerExchange exchange) {
        if (lastNonce == null) {
            return createNewNonceString();
        }

        if (invalidNonces.contains(lastNonce)) {
            // The nonce supplied has already been used.
            return createNewNonceString();
        }

        String nonce = lastNonce;
        // Loop the forward mappings.
        synchronized (forwardMapping) {
            NonceHolder holder = new NonceHolder(lastNonce);
            while (forwardMapping.containsKey(holder)) {
                nonce = forwardMapping.get(holder);
                // The final NonceHolder will then be used if a forwardMapping needs to be set.
                holder = new NonceHolder(nonce);
            }

            synchronized (knownNonces) {
                Nonce value = knownNonces.get(nonce);
                if (value == null) {
                    // Not a likely scenario but if this occurs then most likely the nonce mapped to has also expired so we will
                    // just send a new nonce.
                    nonce = createNewNonceString();
                } else {
                    long now = System.currentTimeMillis();
                    // The cacheTimePostExpiry is not included here as this is our opportunity to inform the client to use a
                    // replacement nonce without a stale round trip.
                    long earliestAccepted = now - firstUseTimeOut;
                    if (value.timeStamp < earliestAccepted || value.timeStamp > now) {
                        XnioExecutor executor = exchange.getIoThread();
                        Nonce replacement = createNewNonce(holder);
                        if (value.executorKey != null) {
                            // The outcome doesn't matter - if we have the value we have all we need.
                            value.executorKey.remove();
                        }

                        nonce = replacement.nonce;
                        // Create a record of the forward mapping so if any requests do need to be marked stale they can be
                        // pointed towards the correct nonce to use.
                        forwardMapping.put(holder, nonce);
                        // Bring over any existing session key.
                        replacement.setSessionKey(value.getSessionKey());
                        // At this point we will not accept the nonce again so remove it from the list of known nonces but do
                        // register the replacement.
                        knownNonces.remove(holder.nonce);
                        // There are two reasons for registering the replacement 1 - to preserve any session key, 2 - To keep a
                        // reference to the now invalid key so it
                        // can be used as a key in a weak hash map.
                        knownNonces.put(nonce, replacement);
                        earliestAccepted = now - (overallTimeOut + cacheTimePostExpiry);
                        long timeTillExpiry = replacement.timeStamp - earliestAccepted;
                        replacement.executorKey = executor.executeAfter(new KnownNonceCleaner(nonce), timeTillExpiry,
                                TimeUnit.MILLISECONDS);
                    }
                }
            }
        }

        return nonce;
    }

    // Convenience wrapper when no previous nonce is being replaced.
    private String createNewNonceString() {
        return createNewNonce(null).nonce;
    }

    // Builds a new nonce from a random 8-byte prefix and the current timestamp, optionally
    // remembering the nonce it replaces (kept alive for the forwardMapping weak map).
    private Nonce createNewNonce(NonceHolder previousNonce) {
        byte[] prefix = new byte[8];
        random.nextBytes(prefix);
        long timeStamp = System.currentTimeMillis();
        byte[] now = Long.toString(timeStamp).getBytes(UTF_8);

        String nonce = createNonce(prefix, now);

        return new Nonce(nonce, timeStamp, previousNonce);
    }

    /**
     * Validates a nonce supplied by a client, covering both counted (nc) and single-use flows.
     *
     * @see io.undertow.security.api.NonceManager#validateNonce(java.lang.String, int)
     */
    @Override
    public boolean validateNonce(String nonce, int nonceCount, HttpServerExchange exchange) {
        XnioExecutor executor = exchange.getIoThread();
        if (nonceCount < 0) {
            if (invalidNonces.contains(nonce)) {
                // Without a nonce count the nonce is only usable once.
                return false;
            }
            // Not already known so will drop into first use validation.
        } else if (knownNonces.containsKey(nonce)) {
            // At this point we need to validate that the nonce is still within it's time limits,
            // If a new nonce had been selected then a known nonce would not have been found.
            // The nonce will also have it's nonce count checked.
            return validateNonceWithCount(new Nonce(nonce), nonceCount, executor);

        } else if (forwardMapping.containsKey(new NonceHolder(nonce))) {
            // We could have let this drop through as the next validation would fail anyway but
            // why waste the time if we already know a replacement nonce has been issued.
            return false;
        }

        // This is not a nonce currently known to us so start the validation process.
        Nonce value = verifyUnknownNonce(nonce, nonceCount);
        if (value == null) {
            return false;
        }

        long now = System.currentTimeMillis();
        // NOTE - This check is for the first use, overall validity is checked in validateNonceWithCount.
        long earliestAccepted = now - firstUseTimeOut;
        if (value.timeStamp < earliestAccepted || value.timeStamp > now) {
            // The embedded timestamp is either expired or somehow is after now.
            return false;
        }

        if (nonceCount < 0) {
            // Allow a single use but reject all further uses.
            return addInvalidNonce(value, executor);
        } else {
            return validateNonceWithCount(value, nonceCount, executor);
        }
    }

    // Validates a nonce that carries a nonce count: checks the overall validity window and
    // enforces a strictly increasing count so replays are rejected.
    private boolean validateNonceWithCount(Nonce nonce, int nonceCount, final XnioExecutor executor) {
        // This point could have been reached either because the knownNonces map contained the key or because
        // it didn't and a count was supplied - either way need to double check the contents of knownNonces once
        // the lock is in place.
        synchronized (knownNonces) {
            Nonce value = knownNonces.get(nonce.nonce);
            long now = System.currentTimeMillis();
            // For the purpose of this validation we also add the cacheTimePostExpiry - when nextNonce is subsequently
            // called it will decide if we are in the interval to replace the nonce.
            long earliestAccepted = now - (overallTimeOut + cacheTimePostExpiry);
            if (value == null) {
                if (nonce.timeStamp < 0) {
                    // Means it was in there, now it isn't - most likely a timestamp expiration mid check - abandon validation.
                    return false;
                }

                if (nonce.timeStamp > earliestAccepted && nonce.timeStamp < now) {
                    knownNonces.put(nonce.nonce, nonce);
                    long timeTillExpiry = nonce.timeStamp - earliestAccepted;
                    nonce.executorKey = executor.executeAfter(new KnownNonceCleaner(nonce.nonce), timeTillExpiry,
                            TimeUnit.MILLISECONDS);
                    return true;
                }

                return false;
            } else {
                // We have it, just need to verify that it has not expired and that the nonce key is valid.
                if (value.timeStamp < earliestAccepted || value.timeStamp > now) {
                    // The embedded timestamp is either expired or somehow is after now!!
                    return false;
                }

                if (value.getMaxNonceCount() < nonceCount) {
                    value.setMaxNonceCount(nonceCount);
                    return true;
                }

                return false;
            }
        }
    }

    // Records a nonce used without a count as single-use, scheduling its removal once the
    // first-use window has passed. Returns false if the nonce is too close to expiry to trust.
    private boolean addInvalidNonce(final Nonce nonce, final XnioExecutor executor) {
        long now = System.currentTimeMillis();
        long invalidBefore = now - firstUseTimeOut;

        long timeTillInvalid = nonce.timeStamp - invalidBefore;
        if (timeTillInvalid > 0) {
            if (invalidNonces.add(nonce.nonce)) {
                executor.executeAfter(new InvalidNonceCleaner(nonce.nonce), timeTillInvalid, TimeUnit.MILLISECONDS);
                return true;
            } else {
                return false;
            }
        } else {
            // So close to expiring any record of this nonce being used could have been cleared so
            // don't take a chance and just say no.
            return false;
        }
    }

    /**
     * Verify a previously unknown nonce and return the {@link Nonce} representation for the nonce.
     *
     * Later when a nonce is re-used we can match based on the String alone - the information embedded within the nonce will be
     * cached with it.
     *
     * This stage of the verification simply extracts the prefix and the embedded timestamp and recreates a new hashed and
     * Base64 nonce based on the local secret - if the newly generated nonce matches the supplied one we accept it was created
     * by this nonce manager.
     *
     * This verification does not validate that the timestamp is within a valid time period.
     *
     * @param nonce -
     * @return the decoded Nonce, or null if the supplied string was not created by this manager
     */
    private Nonce verifyUnknownNonce(final String nonce, final int nonceCount) {
        byte[] complete;
        int offset;
        int length;
        try {
            ByteBuffer decode = FlexBase64.decode(nonce);
            complete = decode.array();
            offset = decode.arrayOffset();
            length = decode.limit() - offset;
        } catch (IOException e) {
            throw MESSAGES.invalidBase64Token(e);
        }

        // Layout (see createNonce): 8 prefix bytes, 1 length byte, timestamp bytes, hash bytes.
        int timeStampLength = complete[offset + 8];
        // A sanity check to try and verify the sizes we expect from the arrays are correct.
        if (hashLength > 0) {
            int expectedLength = 9 + timeStampLength + hashLength;
            if (length != expectedLength) {
                throw MESSAGES.invalidNonceReceived();
            } else if (timeStampLength + 1 >= length)
                throw MESSAGES.invalidNonceReceived();
        }

        byte[] prefix = new byte[8];
        System.arraycopy(complete, offset, prefix, 0, 8);
        byte[] timeStampBytes = new byte[timeStampLength];
        System.arraycopy(complete, offset + 9, timeStampBytes, 0, timeStampBytes.length);

        String expectedNonce = createNonce(prefix, timeStampBytes);

        if (expectedNonce.equals(nonce)) {
            try {
                long timeStamp = Long.parseLong(new String(timeStampBytes, UTF_8));

                return new Nonce(expectedNonce, timeStamp, nonceCount);
            } catch (NumberFormatException dropped) {
                // Fall through and treat the nonce as invalid.
            }
        }

        return null;
    }

    // Assembles a nonce string: prefix | timestamp-length | timestamp | hash(prefix, timestamp, secret).
    private String createNonce(final byte[] prefix, final byte[] timeStamp) {
        byte[] hashedPart = generateHash(prefix, timeStamp);
        byte[] complete = new byte[9 + timeStamp.length + hashedPart.length];
        System.arraycopy(prefix, 0, complete, 0, 8);
        complete[8] = (byte) timeStamp.length;
        System.arraycopy(timeStamp, 0, complete, 9, timeStamp.length);
        System.arraycopy(hashedPart, 0, complete, 9 + timeStamp.length, hashedPart.length);

        return FlexBase64.encodeString(complete, false);
    }

    // Digests prefix + timestamp + this instance's secret, binding the nonce to this manager.
    private byte[] generateHash(final byte[] prefix, final byte[] timeStamp) {
        MessageDigest digest = getDigest(hashAlg);

        digest.update(prefix);
        digest.update(timeStamp);

        return digest.digest(secret.getBytes(UTF_8));
    }

    public void associateHash(String nonce, byte[] hash) {
        // TODO Auto-generated method stub
    }

    public byte[] lookupHash(String nonce) {
        // TODO Auto-generated method stub
        return null;
    }

    /**
     * A simple wrapper around a nonce to allow it to be used as a key in a weak map.
     */
    private class NonceHolder {
        private final String nonce;

        private NonceHolder(final String nonce) {
            if (nonce == null) {
                throw new NullPointerException("nonce must not be null.");
            }
            this.nonce = nonce;
        }

        @Override
        public int hashCode() {
            return nonce.hashCode();
        }

        @Override
        public boolean equals(Object obj) {
            return (obj instanceof NonceHolder) ? nonce.equals(((NonceHolder) obj).nonce) : false;
        }
    }

    /**
     * The state associated with a nonce.
     *
     * A NonceKey for a previously valid nonce is also referenced, this is so that a WeakHashMap can be used to maintain a
     * mapping from the original NonceKey to the new nonce value.
     */
    private class Nonce {

        private final String nonce;

        private final long timeStamp;
        // TODO we will also add a mechanism to track the gaps as the only restriction is that a NC can only be used once.
        private int maxNonceCount;
        // We keep this as it is used in the weak hash map as a forward mapping as long as the nonce to map to is still alive.
        @SuppressWarnings("unused")
        private final NonceHolder previousNonce;
        // Session key associated with this nonce (carried over to replacement nonces).
        private byte[] sessionKey;
        // Handle on the scheduled cleanup task so it can be cancelled when the nonce is replaced.
        private Key executorKey;

        private Nonce(final String nonce) {
            this(nonce, -1, -1);
        }

        private Nonce(final String nonce, final long timeStamp) {
            this(nonce, timeStamp, -1);
        }

        private Nonce(final String nonce, final long timeStamp, final int initialNC) {
            this(nonce, timeStamp, initialNC, null);
        }

        private Nonce(final String nonce, final long timeStamp, final NonceHolder previousNonce) {
            this(nonce, timeStamp, -1, previousNonce);
        }

        private Nonce(final String nonce, final long timeStamp, final int initialNC, final NonceHolder previousNonce) {
            this.nonce = nonce;
            this.timeStamp = timeStamp;
            this.maxNonceCount = initialNC;
            this.previousNonce = previousNonce;
        }

        byte[] getSessionKey() {
            return sessionKey;
        }

        void setSessionKey(final byte[] sessionKey) {
            this.sessionKey = sessionKey;
        }

        int getMaxNonceCount() {
            return maxNonceCount;
        }

        void setMaxNonceCount(int maxNonceCount) {
            this.maxNonceCount = maxNonceCount;
        }
    }

    // Scheduled task removing a single-use nonce from the invalid set once it has expired anyway.
    private class InvalidNonceCleaner implements Runnable {
        private final String nonce;

        private InvalidNonceCleaner(final String nonce) {
            if (nonce == null) {
                throw new NullPointerException("nonce must not be null.");
            }
            this.nonce = nonce;
        }

        public void run() {
            invalidNonces.remove(nonce);
        }
    }

    // Scheduled task removing an expired nonce from the known set.
    private class KnownNonceCleaner implements Runnable {
        private final String nonce;

        private KnownNonceCleaner(final String nonce) {
            if (nonce == null) {
                throw new NullPointerException("nonce must not be null.");
            }
            this.nonce = nonce;
        }

        public void run() {
            knownNonces.remove(nonce);
        }
    }
}
| |
/*
* Copyright 2013-2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.aws.core.io.s3;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.AmazonS3Exception;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.util.AntPathMatcher;
import org.springframework.util.Assert;
import org.springframework.util.PathMatcher;
import org.springframework.util.StringUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* A {@link ResourcePatternResolver} implementation which allows an ant-style path matching when
* loading S3 resources. Ant wildcards (*, ** and ?) are allowed in both the bucket name and the
* object name.
* <p><b>WARNING:</b>
* Be aware that when you are using wildcards in the bucket name it can take a very long time to parse all
* files. Moreover this implementation does not return truncated results. This means that when handling
* huge buckets it could lead to serious performance problems. For more information look at the
* {@code findProgressivelyWithPartialMatch} method.</p>
*
* @author Alain Sahli
* @author Agim Emruli
* @since 1.0
*/
public class PathMatchingSimpleStorageResourcePatternResolver implements ResourcePatternResolver {
private static final Logger LOGGER = LoggerFactory.getLogger(PathMatchingSimpleStorageResourcePatternResolver.class);
private final AmazonS3 amazonS3;
private final ResourceLoader simpleStorageResourceLoader;
private final ResourcePatternResolver resourcePatternResolverDelegate;
private PathMatcher pathMatcher = new AntPathMatcher();
/**
 * Construct a new instance of the {@link PathMatchingSimpleStorageResourcePatternResolver} with a
 * {@link SimpleStorageResourceLoader} to load AmazonS3 instances, and also a delegate {@link ResourcePatternResolver}
 * to resolve resource on default path (like file and classpath)
 *
 * @param amazonS3 - used to retrieve the directory listings, must not be null
 * @param simpleStorageResourceLoader - used to retrieve object from amazon s3, must not be null
 * @param resourcePatternResolverDelegate - delegate resolver used to resolve common path (file, classpath, servlet etc.),
 * must not be null
 */
public PathMatchingSimpleStorageResourcePatternResolver(AmazonS3 amazonS3, ResourceLoader simpleStorageResourceLoader,
                                                        ResourcePatternResolver resourcePatternResolverDelegate) {
    // Fail fast with descriptive messages: the single-argument Assert.notNull is deprecated in
    // Spring and the two collaborators were previously not validated at all, deferring the NPE
    // to an arbitrary later call.
    Assert.notNull(amazonS3, "AmazonS3 client must not be null");
    Assert.notNull(simpleStorageResourceLoader, "SimpleStorageResourceLoader must not be null");
    Assert.notNull(resourcePatternResolverDelegate, "ResourcePatternResolver delegate must not be null");
    this.amazonS3 = amazonS3;
    this.simpleStorageResourceLoader = simpleStorageResourceLoader;
    this.resourcePatternResolverDelegate = resourcePatternResolverDelegate;
}
/**
 * Replaces the default {@link AntPathMatcher} used to match bucket and object names against
 * the supplied location pattern.
 *
 * @param pathMatcher
 *         the {@link PathMatcher} implementation to use, must not be null
 * @see AntPathMatcher
 */
public void setPathMatcher(PathMatcher pathMatcher) {
    Assert.notNull(pathMatcher, "PathMatcher must not be null");
    this.pathMatcher = pathMatcher;
}
@Override
public Resource[] getResources(String locationPattern) throws IOException {
    // Anything that is not an s3:// location is handled by the delegate (file, classpath, ...).
    if (!SimpleStorageNameUtils.isSimpleStorageResource(locationPattern)) {
        return this.resourcePatternResolverDelegate.getResources(locationPattern);
    }
    // An s3 location without wildcards resolves to exactly one resource.
    if (!this.pathMatcher.isPattern(SimpleStorageNameUtils.stripProtocol(locationPattern))) {
        return new Resource[]{this.simpleStorageResourceLoader.getResource(locationPattern)};
    }
    LOGGER.debug("Found wildcard pattern in location {}", locationPattern);
    return findPathMatchingResources(locationPattern);
}
/**
 * Resolves a wildcard s3 location. The bucket and the key patterns are separated and resolved
 * independently, as each one uses a different aws API.
 *
 * @param locationPattern the s3 location pattern containing at least one wildcard
 * @return every matching resource, possibly an empty array
 */
protected Resource[] findPathMatchingResources(String locationPattern) {
    String bucketPattern = SimpleStorageNameUtils.getBucketNameFromLocation(locationPattern);
    String keyPattern = SimpleStorageNameUtils.getObjectNameFromLocation(locationPattern);

    Set<Resource> resources;
    if (!this.pathMatcher.isPattern(bucketPattern)) {
        LOGGER.debug("No wildcard in bucket name {} using single bucket name", bucketPattern);
        resources = findPathMatchingKeys(keyPattern, Arrays.asList(bucketPattern));
    } else {
        List<String> matchingBuckets = findMatchingBuckets(bucketPattern);
        LOGGER.debug("Found wildcard in bucket name {} buckets found are {}", bucketPattern, matchingBuckets);
        // A '**' wildcard in the bucket name requires inspecting every object in each matching
        // bucket, so the key pattern is prefixed with '**/' to make findPathMatchingKeys walk
        // through all objects.
        if (bucketPattern.startsWith("**")) {
            keyPattern = "**/" + keyPattern;
        }
        resources = findPathMatchingKeys(keyPattern, matchingBuckets);
        LOGGER.debug("Found resources {} in buckets {}", resources, matchingBuckets);
    }

    return resources.toArray(new Resource[resources.size()]);
}
/**
 * Resolves the key pattern in every candidate bucket. Wildcard patterns are expanded
 * through a bucket scan; plain keys are looked up directly and kept only if they exist.
 */
private Set<Resource> findPathMatchingKeys(String keyPattern, List<String> matchingBuckets) {
    Set<Resource> resources = new HashSet<>();
    boolean wildcardKey = this.pathMatcher.isPattern(keyPattern);
    for (String bucketName : matchingBuckets) {
        if (wildcardKey) {
            findPathMatchingKeyInBucket(bucketName, resources, null, keyPattern);
        } else {
            String location = SimpleStorageNameUtils.getLocationForBucketAndObject(bucketName, keyPattern);
            Resource resource = this.simpleStorageResourceLoader.getResource(location);
            if (resource.exists()) {
                resources.add(resource);
            }
        }
    }
    return resources;
}
/**
 * Dispatches the key resolution strategy for one bucket: a '**' at the current pattern
 * depth requires a full object scan, anything else can be matched progressively one
 * path level at a time.
 */
private void findPathMatchingKeyInBucket(String bucketName, Set<Resource> resources, String prefix, String keyPattern) {
    String remainingPatternPart = getRemainingPatternPart(keyPattern, prefix);
    // getRemainingPatternPart returns null when the prefix already has more path
    // segments than the pattern. Guarding here removes a latent NullPointerException
    // on startsWith; such prefixes fall through to the progressive scan, which simply
    // finds no further matches.
    if (remainingPatternPart != null && remainingPatternPart.startsWith("**")) {
        findAllResourcesThatMatches(bucketName, resources, prefix, keyPattern);
    } else {
        findProgressivelyWithPartialMatch(bucketName, resources, prefix, keyPattern);
    }
}
/**
 * Scans every object under the given prefix (paging through truncated listings) and
 * collects the resources whose key matches the pattern.
 */
private void findAllResourcesThatMatches(String bucketName, Set<Resource> resources, String prefix, String keyPattern) {
    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucketName).withPrefix(prefix);
    ObjectListing objectListing = null;
    do {
        try {
            if (objectListing == null) {
                objectListing = this.amazonS3.listObjects(listObjectsRequest);
            } else {
                objectListing = this.amazonS3.listNextBatchOfObjects(objectListing);
            }
        } catch (AmazonS3Exception e) {
            if (301 != e.getStatusCode()) {
                throw e;
            }
            // HTTP 301 (PermanentRedirect): the bucket is served from another
            // region/endpoint. The previous code swallowed the exception and re-ran
            // the loop with an unchanged listing, which could retry the same failing
            // request forever on a truncated listing. Give up on this bucket instead.
            return;
        }
        // addAll on an empty set is a no-op, so no isEmpty guard is needed.
        resources.addAll(getResourcesFromObjectSummaries(bucketName, keyPattern, objectListing.getObjectSummaries()));
    } while (objectListing.isTruncated());
}
/**
 * Searches for matching keys progressively: instead of retrieving all keys under a
 * prefix at once, it descends one path level at a time (using '/' as delimiter) and
 * discards non-matching branches early, which avoids many unused request results.
 * WARNING: This method does not truncate results. Therefore all matching resources
 * will be returned regardless of the truncation.
 */
private void findProgressivelyWithPartialMatch(String bucketName, Set<Resource> resources, String prefix, String keyPattern) {
    ListObjectsRequest listObjectsRequest = new ListObjectsRequest().withBucketName(bucketName).withDelimiter("/").withPrefix(prefix);
    ObjectListing objectListing = this.amazonS3.listObjects(listObjectsRequest);
    while (true) {
        Set<Resource> newResources = getResourcesFromObjectSummaries(bucketName, keyPattern, objectListing.getObjectSummaries());
        if (!newResources.isEmpty()) {
            resources.addAll(newResources);
        }
        // Recurse into each sub-"directory" that still partially matches the pattern.
        for (String commonPrefix : objectListing.getCommonPrefixes()) {
            if (isKeyPathMatchesPartially(keyPattern, commonPrefix)) {
                findPathMatchingKeyInBucket(bucketName, resources, commonPrefix, keyPattern);
            }
        }
        if (!objectListing.isTruncated()) {
            break;
        }
        objectListing = this.amazonS3.listNextBatchOfObjects(objectListing);
    }
}
/**
 * Returns the tail of the key pattern that is not yet covered by the given path,
 * skipping one '/'-separated pattern segment per segment already present in the path.
 * Returns {@code null} when the path contains more segments than the pattern.
 */
private String getRemainingPatternPart(String keyPattern, String path) {
    int coveredSegments = StringUtils.countOccurrencesOf(path, "/");
    int patternOffset = getIndexOfNthOccurrence(keyPattern, "/", coveredSegments);
    if (patternOffset == -1) {
        return null;
    }
    return keyPattern.substring(patternOffset);
}
/**
 * Tells whether the given key path (a common prefix ending in '/') matches the leading
 * portion of the key pattern up to the same path depth, i.e. whether descending into
 * that prefix can still yield matches.
 */
private boolean isKeyPathMatchesPartially(String keyPattern, String keyPath) {
    int pathDepth = StringUtils.countOccurrencesOf(keyPath, "/");
    int patternOffset = getIndexOfNthOccurrence(keyPattern, "/", pathDepth);
    // A pattern with fewer segments than the path can never match it partially.
    return patternOffset != -1 && this.pathMatcher.match(keyPattern.substring(0, patternOffset), keyPath);
}
/**
 * Returns the index immediately after the {@code pos}-th occurrence of {@code sub} in
 * {@code str}, or -1 if there are fewer than {@code pos} occurrences. A {@code pos} of
 * zero yields 0. Successive searches resume one character past each hit, exactly as
 * the original substring-based implementation did.
 */
private int getIndexOfNthOccurrence(String str, String sub, int pos) {
    int searchFrom = 0;
    for (int occurrence = 0; occurrence < pos; occurrence++) {
        int found = str.indexOf(sub, searchFrom);
        if (found == -1) {
            return -1;
        }
        searchFrom = found + 1;
    }
    return searchFrom;
}
/**
 * Converts the object summaries of one listing page into existing resources whose key
 * matches the given pattern.
 */
private Set<Resource> getResourcesFromObjectSummaries(String bucketName, String keyPattern, List<S3ObjectSummary> objectSummaries) {
    Set<Resource> resources = new HashSet<>();
    for (S3ObjectSummary objectSummary : objectSummaries) {
        if (this.pathMatcher.match(keyPattern, objectSummary.getKey())) {
            // Build the s3:// location only for keys that actually match; the original
            // computed it unconditionally for every summary in the page.
            String keyPath = SimpleStorageNameUtils.getLocationForBucketAndObject(bucketName, objectSummary.getKey());
            Resource resource = this.simpleStorageResourceLoader.getResource(keyPath);
            if (resource.exists()) {
                resources.add(resource);
            }
        }
    }
    return resources;
}
/**
 * Lists every bucket of the account and returns the names matching the bucket pattern.
 */
private List<String> findMatchingBuckets(String bucketPattern) {
List<Bucket> buckets = this.amazonS3.listBuckets();
List<String> matchingBuckets = new ArrayList<>();
for (Bucket bucket : buckets) {
// NOTE(review): the return value is ignored -- presumably this call is meant to fail
// fast for buckets the credentials cannot access, and it may throw for buckets served
// from another region before the name is even pattern-matched. Confirm the intent.
this.amazonS3.getBucketLocation(bucket.getName());
if (this.pathMatcher.match(bucketPattern, bucket.getName())) {
matchingBuckets.add(bucket.getName());
}
}
return matchingBuckets;
}
@Override
public Resource getResource(String location) {
// Single (non-pattern) locations are delegated to the simple storage resource loader.
return this.simpleStorageResourceLoader.getResource(location);
}
@Override
public ClassLoader getClassLoader() {
// Expose the class loader of the underlying resource loader, as required by ResourceLoader.
return this.simpleStorageResourceLoader.getClassLoader();
}
}
| |
package org.helianto.security.domain;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.persistence.UniqueConstraint;
import javax.persistence.Version;
import org.helianto.core.domain.enums.ActivityState;
import org.helianto.user.domain.UserGroup;
import com.fasterxml.jackson.annotation.JsonIgnore;
/**
 * Domain class to represent user authority.
 *
 * Grants roles to user groups.
 *
 * @author mauriciofernandesdecastro
 */
@javax.persistence.Entity
@Table(name="core_authority",
    uniqueConstraints = {
        @UniqueConstraint(columnNames={"userGroupId", "serviceCode"})
    }
)
public class UserAuthority implements Serializable {

    private static final long serialVersionUID = 1L;

    @Id @GeneratedValue(strategy=GenerationType.AUTO)
    private int id;

    @Version
    private int version;

    // Owning group; nullable so detached instances may carry only the transient id/name.
    @JsonIgnore
    @ManyToOne
    @JoinColumn(name="userGroupId", nullable=true)
    private UserGroup userGroup;

    // <<Transient>> group id used when the userGroup association is not loaded.
    @Transient
    private Integer userGroupId = 0;

    // <<Transient>> group name used when the userGroup association is not loaded.
    @Transient
    private String userGroupName = "";

    @Column(length=20)
    private String serviceCode;

    // Comma-separated extension list; may be null or empty.
    @Column(length=128)
    private String serviceExtension;

    @Column(length=20)
    @Enumerated(EnumType.STRING)
    private ActivityState authorityState = ActivityState.ACTIVE;

    @Transient
    private Integer selfIdentityId = 0;

    /**
     * Constructor.
     */
    public UserAuthority() {
        super();
    }

    /**
     * Constructor.
     *
     * @param userGroup group receiving the authority
     * @param serviceCode code of the granted service
     */
    public UserAuthority(UserGroup userGroup, String serviceCode) {
        this();
        setUserGroup(userGroup);
        setServiceCode(serviceCode);
    }

    /**
     * Constructor.
     *
     * @param userGroup group receiving the authority
     * @param serviceCode code of the granted service
     * @param extensions comma-separated service extensions
     */
    public UserAuthority(UserGroup userGroup, String serviceCode, String extensions) {
        this(userGroup, serviceCode);
        setServiceExtension(extensions);
    }

    /**
     * Constructor used by read adapters, carrying only plain values.
     *
     * @param id primary key
     * @param userGroupId transient group id
     * @param serviceCode code of the granted service
     * @param serviceExtension comma-separated service extensions
     * @param userGroupName transient group name
     */
    public UserAuthority(
            Integer id
            , Integer userGroupId
            , String serviceCode
            , String serviceExtension
            , String userGroupName) {
        this();
        this.id = id;
        this.userGroupId = userGroupId;
        this.serviceCode = serviceCode;
        this.serviceExtension = serviceExtension;
        this.userGroupName = userGroupName;
    }

    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }

    /**
     * User group where authorities are applied.
     */
    public UserGroup getUserGroup() {
        return userGroup;
    }
    public void setUserGroup(UserGroup userGroup) {
        this.userGroup = userGroup;
    }

    /**
     * <<Transient>> user group id; prefers the loaded association when present.
     */
    public Integer getUserGroupId() {
        if (getUserGroup()!=null) {
            return getUserGroup().getId();
        }
        return userGroupId;
    }
    public void setUserGroupId(Integer userGroupId) {
        this.userGroupId = userGroupId;
    }

    /**
     * <<Transient>> user group name; prefers the loaded association when present.
     */
    public String getUserGroupName() {
        if (getUserGroup()!=null) {
            return getUserGroup().getUserName();
        }
        return userGroupName;
    }
    public void setUserGroupName(String userGroupName) {
        this.userGroupName = userGroupName;
    }

    /**
     * Service code.
     */
    public String getServiceCode() {
        return serviceCode;
    }
    public void setServiceCode(String serviceCode) {
        this.serviceCode = serviceCode;
    }

    /**
     * Comma-separated service extensions; may be null.
     */
    public String getServiceExtension() {
        return serviceExtension;
    }
    public void setServiceExtension(String serviceExtension) {
        this.serviceExtension = serviceExtension;
    }

    public ActivityState getAuthorityState() {
        return authorityState;
    }
    public void setAuthorityState(ActivityState authorityState) {
        this.authorityState = authorityState;
    }

    /**
     * <<Transient>> self identity id.
     *
     * <p>Convenient transient field to provide the logged user with privileges
     * assigned to herself.</p>
     */
    public Integer getSelfIdentityId() {
        return selfIdentityId;
    }
    public void setSelfIdentityId(Integer selfIdentityId) {
        this.selfIdentityId = selfIdentityId;
    }

    /**
     * Expands a list of user authorities into the corresponding role names.
     *
     * @param adapterList authorities to expand
     * @return role names in encounter order, one entry per service/extension pair
     */
    public static List<String> getRoleNames(List<UserAuthority> adapterList) {
        List<String> roleNames = new ArrayList<>();
        for (UserAuthority userAuthorityReadAdapter: adapterList) {
            roleNames.addAll(getUserAuthoritiesAsString(
                    userAuthorityReadAdapter.getServiceCode()
                    , userAuthorityReadAdapter.getServiceExtension()));
        }
        return roleNames;
    }

    /**
     * Converts user roles to authorities.
     *
     * @param serviceName service code, used as the role stem
     * @param serviceExtensions comma-separated extensions; may be null or empty
     * @return the base role plus one role per extension, in insertion order
     */
    public static Set<String> getUserAuthoritiesAsString(String serviceName, String serviceExtensions) {
        Set<String> roleNames = new LinkedHashSet<String>();
        roleNames.add(formatRole(serviceName, null));
        // Guard against a missing extension list: previously a null serviceExtension
        // caused a NullPointerException on split().
        if (serviceExtensions != null) {
            for (String extension: serviceExtensions.split(",")) {
                roleNames.add(formatRole(serviceName, extension));
            }
        }
        return roleNames;
    }

    /**
     * Convenient conversion for authorities.
     */
    public Set<String> getUserAuthoritiesAsString() {
        return getUserAuthoritiesAsString(getServiceCode(), getServiceExtension());
    }

    /**
     * Internal role formatter: ROLE_&lt;SERVICE&gt; or ROLE_&lt;SERVICE&gt;_&lt;extension&gt;.
     *
     * @param serviceName service code (upper-cased in the role)
     * @param extension optional extension; blank or null extensions are ignored
     */
    public static String formatRole(String serviceName, String extension) {
        StringBuilder sb = new StringBuilder("ROLE_").append(serviceName.toUpperCase());
        if (extension!=null) {
            // Trim before the emptiness check so a whitespace-only extension does not
            // produce a malformed role ending with '_'.
            String trimmed = extension.trim();
            if (trimmed.length()>0) {
                sb.append("_").append(trimmed);
            }
        }
        return sb.toString();
    }

    /**
     * Merger.
     *
     * @param command source of the merged state
     */
    public UserAuthority merge(UserAuthority command) {
        setServiceCode(command.getServiceCode());
        setServiceExtension(command.getServiceExtension());
        setAuthorityState(command.getAuthorityState());
        return this;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result
                + ((serviceCode == null) ? 0 : serviceCode.hashCode());
        result = prime * result
                + ((userGroup == null) ? 0 : userGroup.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        UserAuthority other = (UserAuthority) obj;
        if (serviceCode == null) {
            if (other.serviceCode != null)
                return false;
        } else if (!serviceCode.equals(other.serviceCode))
            return false;
        if (userGroup == null) {
            if (other.userGroup != null)
                return false;
        } else if (!userGroup.equals(other.userGroup))
            return false;
        return true;
    }

}
| |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.yangutils.parser.impl.listeners;
import java.io.IOException;
import java.util.ListIterator;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.onosproject.yangutils.datamodel.YangContainer;
import org.onosproject.yangutils.datamodel.YangDataTypes;
import org.onosproject.yangutils.datamodel.YangLeaf;
import org.onosproject.yangutils.datamodel.YangLeafList;
import org.onosproject.yangutils.datamodel.YangList;
import org.onosproject.yangutils.datamodel.YangModule;
import org.onosproject.yangutils.datamodel.YangNode;
import org.onosproject.yangutils.datamodel.YangNodeType;
import org.onosproject.yangutils.datamodel.YangStatusType;
import org.onosproject.yangutils.parser.exceptions.ParserException;
import org.onosproject.yangutils.parser.impl.YangUtilsParserManager;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
/**
 * Test case for reference listener.
 */
public class ReferenceListenerTest {

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    private final YangUtilsParserManager manager = new YangUtilsParserManager();

    /**
     * Checks valid reference statement.
     */
    @Test
    public void processReferenceStatement() throws IOException, ParserException {
        YangNode node = manager.getDataModel("src/test/resources/ReferenceStatement.yang");
        // Check whether the data model tree returned is of type module.
        assertThat((node instanceof YangModule), is(true));
        // Check whether the node type is set properly to module.
        assertThat(node.getNodeType(), is(YangNodeType.MODULE_NODE));
        // Check whether the module name is set correctly.
        YangModule yangNode = (YangModule) node;
        assertThat(yangNode.getName(), is("Test"));
        ListIterator<YangLeaf> leafIterator = yangNode.getListOfLeaf().listIterator();
        YangLeaf leafInfo = leafIterator.next();
        // Check whether the reference is set correctly.
        assertThat(leafInfo.getName(), is("invalid-interval"));
        assertThat(leafInfo.getReference(), is("\"RFC 6020\""));
    }

    /**
     * Checks whether exception is thrown for invalid reference statement.
     */
    @Test
    public void processReferenceWithoutStatementEnd() throws IOException, ParserException {
        thrown.expect(ParserException.class);
        thrown.expectMessage("mismatched input '}' expecting {';', '+'}");
        // The returned node is irrelevant: parsing is expected to fail with the error
        // above, so the previously unused local variable has been dropped.
        manager.getDataModel("src/test/resources/ReferenceWithoutStatementEnd.yang");
    }

    /**
     * Checks valid reference statement under module.
     */
    @Test
    public void processModuleSubStatementReference() throws IOException, ParserException {
        YangNode node = manager.getDataModel("src/test/resources/ModuleSubStatementReference.yang");
        // Check whether the data model tree returned is of type module.
        assertThat((node instanceof YangModule), is(true));
        // Check whether the node type is set properly to module.
        assertThat(node.getNodeType(), is(YangNodeType.MODULE_NODE));
        // Check whether the module name is set correctly.
        YangModule yangNode = (YangModule) node;
        assertThat(yangNode.getName(), is("Test"));
        // Check whether the reference is set correctly.
        assertThat(yangNode.getReference(), is("\"RFC 6020\""));
    }

    /**
     * Checks valid empty reference statement under module.
     */
    @Test
    public void processReferenceEmptyStatement() throws IOException, ParserException {
        YangNode node = manager.getDataModel("src/test/resources/ReferenceEmptyStatement.yang");
        // Check whether the data model tree returned is of type module.
        assertThat((node instanceof YangModule), is(true));
        // Check whether the node type is set properly to module.
        assertThat(node.getNodeType(), is(YangNodeType.MODULE_NODE));
        // Check whether the module name is set correctly.
        YangModule yangNode = (YangModule) node;
        assertThat(yangNode.getName(), is("Test"));
        // Check whether the (empty) reference is set correctly.
        assertThat(yangNode.getReference(), is("\"\""));
    }

    /**
     * Checks valid reference statement as sub-statement of revision.
     */
    @Test
    public void processRevisionSubStatementReference() throws IOException, ParserException {
        YangNode node = manager.getDataModel("src/test/resources/RevisionSubStatementReference.yang");
        // Check whether the data model tree returned is of type module.
        assertThat((node instanceof YangModule), is(true));
        // Check whether the node type is set properly to module.
        assertThat(node.getNodeType(), is(YangNodeType.MODULE_NODE));
        // Check whether the module name is set correctly.
        YangModule yangNode = (YangModule) node;
        assertThat(yangNode.getName(), is("Test"));
        // Check whether the reference is set correctly.
        assertThat(yangNode.getRevision().getReference(), is("\"revision reference\""));
    }

    /**
     * Checks reference statement as sub-statement of container.
     */
    @Test
    public void processContainerSubStatementReference() throws IOException, ParserException {
        YangNode node = manager.getDataModel("src/test/resources/ContainerSubStatementReference.yang");
        // Check whether the data model tree returned is of type module.
        assertThat((node instanceof YangModule), is(true));
        // Check whether the node type is set properly to module.
        assertThat(node.getNodeType(), is(YangNodeType.MODULE_NODE));
        // Check whether the module name is set correctly.
        YangModule yangNode = (YangModule) node;
        assertThat(yangNode.getName(), is("Test"));
        // Check whether the reference value is set correctly.
        YangContainer container = (YangContainer) yangNode.getChild();
        assertThat(container.getName(), is("valid"));
        assertThat(container.getReference(), is("\"container reference\""));
        // Check whether leaf properties are set correctly.
        ListIterator<YangLeaf> leafIterator = container.getListOfLeaf().listIterator();
        YangLeaf leafInfo = leafIterator.next();
        assertThat(leafInfo.getName(), is("invalid-interval"));
        assertThat(leafInfo.getDataType().getDataTypeName(), is("uint16"));
        assertThat(leafInfo.getDataType().getDataType(), is(YangDataTypes.UINT16));
        assertThat(leafInfo.getUnits(), is("\"seconds\""));
        assertThat(leafInfo.getDescription(), is("\"Interval before a route is declared invalid\""));
        assertThat(leafInfo.isMandatory(), is(true));
        assertThat(leafInfo.getStatus(), is(YangStatusType.CURRENT));
        assertThat(leafInfo.getReference(), is("\"RFC 6020\""));
    }

    /**
     * Checks reference statement as sub-statement of list.
     */
    @Test
    public void processListSubStatementReference() throws IOException, ParserException {
        YangNode node = manager.getDataModel("src/test/resources/ListSubStatementReference.yang");
        // Check whether the data model tree returned is of type module.
        assertThat((node instanceof YangModule), is(true));
        // Check whether the node type is set properly to module.
        assertThat(node.getNodeType(), is(YangNodeType.MODULE_NODE));
        // Check whether the module name is set correctly.
        YangModule yangNode = (YangModule) node;
        assertThat(yangNode.getName(), is("Test"));
        // Check whether the list is child of module and reference value is set correctly.
        YangList yangList = (YangList) yangNode.getChild();
        assertThat(yangList.getName(), is("valid"));
        assertThat(yangList.isConfig(), is(true));
        assertThat(yangList.getReference(), is("\"list reference\""));
        // Check whether leaf properties are set correctly.
        ListIterator<YangLeaf> leafIterator = yangList.getListOfLeaf().listIterator();
        YangLeaf leafInfo = leafIterator.next();
        assertThat(leafInfo.getName(), is("invalid-interval"));
        assertThat(leafInfo.getDataType().getDataTypeName(), is("uint16"));
        assertThat(leafInfo.getDataType().getDataType(), is(YangDataTypes.UINT16));
        assertThat(leafInfo.getUnits(), is("\"seconds\""));
        assertThat(leafInfo.getDescription(), is("\"Interval before a route is declared invalid\""));
        assertThat(leafInfo.isMandatory(), is(true));
        assertThat(leafInfo.getStatus(), is(YangStatusType.CURRENT));
        assertThat(leafInfo.getReference(), is("\"RFC 6020\""));
    }

    /**
     * Checks valid reference statement as sub-statement of leaf-list.
     */
    @Test
    public void processLeafListSubStatementReference() throws IOException, ParserException {
        YangNode node = manager.getDataModel("src/test/resources/LeafListSubStatementReference.yang");
        // Check whether the data model tree returned is of type module.
        assertThat((node instanceof YangModule), is(true));
        // Check whether the node type is set properly to module.
        assertThat(node.getNodeType(), is(YangNodeType.MODULE_NODE));
        // Check whether the module name is set correctly.
        YangModule yangNode = (YangModule) node;
        assertThat(yangNode.getName(), is("Test"));
        ListIterator<YangLeafList> leafListIterator = yangNode.getListOfLeafList().listIterator();
        YangLeafList leafListInfo = leafListIterator.next();
        // Check whether the reference value is set correctly.
        assertThat(leafListInfo.getName(), is("invalid-interval"));
        assertThat(leafListInfo.getReference(), is("\"RFC 6020\""));
    }
}
| |
package org.arquillian.cube.docker.impl.client.config;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.Map;
/**
 * Configuration bean describing a single container definition: image, runtime options
 * (ports, volumes, capabilities, ...) and lifecycle hooks. Unset reference fields stay
 * {@code null} so that {@link #merge(CubeContainer)} can inherit values from a parent
 * definition.
 */
public class CubeContainer {

    private String workingDir;
    private Boolean disableNetwork;
    private String hostName;
    private Collection<String> portSpecs;
    private String user;
    private Boolean tty;
    private Boolean stdinOpen;
    private Boolean stdinOnce;
    private Long memoryLimit;
    private Long memorySwap;
    private Integer cpuShares;
    private String cpuSet;
    private Boolean attachStdin;
    private Boolean attachSterr;
    private Collection<String> env;
    private Collection<String> cmd;
    private Collection<String> dns;
    private Collection<String> volumes;
    private Collection<String> volumesFrom;
    private Collection<String> binds;
    private Collection<Link> links;
    private Collection<PortBinding> portBindings;
    private Collection<ExposedPort> exposedPorts;
    private Boolean privileged;
    private Boolean publishAllPorts;
    private String networkMode;
    private Collection<String> dnsSearch;
    private Collection<Device> devices;
    private RestartPolicy restartPolicy;
    private Collection<String> capAdd;
    private Collection<String> capDrop;
    private Collection<String> extraHosts;
    private Collection<String> entryPoint;
    private String domainName;
    private Boolean alwaysPull = false;
    private Await await;
    private Image image;
    private String extendsImage;
    // Renamed from 'ReadonlyRootfs' to follow lowerCamelCase field conventions; the
    // getter/setter names (and therefore the bean property) are unchanged. NOTE: being
    // a primitive, this field is never null and thus never inherited by merge().
    private boolean readonlyRootfs;
    private Map<String, String> labels;
    private BuildImage buildImage;
    private Collection<BeforeStop> beforeStop;

    public Image getImage() {
        return image;
    }
    public void setImage(Image image) {
        this.image = image;
    }

    public BuildImage getBuildImage() {
        return buildImage;
    }
    public void setBuildImage(BuildImage buildImage) {
        this.buildImage = buildImage;
    }

    public Collection<PortBinding> getPortBindings() {
        return portBindings;
    }
    public void setPortBindings(Collection<PortBinding> portBindings) {
        this.portBindings = portBindings;
    }

    public Collection<ExposedPort> getExposedPorts() {
        return exposedPorts;
    }
    public void setExposedPorts(Collection<ExposedPort> exposedPorts) {
        this.exposedPorts = exposedPorts;
    }

    public Boolean getReadonlyRootfs() {
        return readonlyRootfs;
    }
    public void setReadonlyRootfs(Boolean readonlyRootfs) {
        this.readonlyRootfs = readonlyRootfs;
    }

    public Map<String, String> getLabels() {
        return labels;
    }
    public void setLabels(Map<String, String> labels) {
        this.labels = labels;
    }

    public String getWorkingDir() {
        return workingDir;
    }
    public void setWorkingDir(String workingDir) {
        this.workingDir = workingDir;
    }

    public Boolean getDisableNetwork() {
        return disableNetwork;
    }
    public void setDisableNetwork(Boolean disableNetwork) {
        this.disableNetwork = disableNetwork;
    }

    public String getHostName() {
        return hostName;
    }
    public void setHostName(String hostName) {
        this.hostName = hostName;
    }

    public Collection<String> getPortSpecs() {
        return portSpecs;
    }
    public void setPortSpecs(Collection<String> portSpecs) {
        this.portSpecs = portSpecs;
    }

    public String getUser() {
        return user;
    }
    public void setUser(String user) {
        this.user = user;
    }

    public Boolean getTty() {
        return tty;
    }
    public void setTty(Boolean tty) {
        this.tty = tty;
    }

    public Boolean getStdinOpen() {
        return stdinOpen;
    }
    public void setStdinOpen(Boolean stdinOpen) {
        this.stdinOpen = stdinOpen;
    }

    public Boolean getStdinOnce() {
        return stdinOnce;
    }
    public void setStdinOnce(Boolean stdinOnce) {
        this.stdinOnce = stdinOnce;
    }

    public Long getMemoryLimit() {
        return memoryLimit;
    }
    public void setMemoryLimit(Long memoryLimit) {
        this.memoryLimit = memoryLimit;
    }

    public Long getMemorySwap() {
        return memorySwap;
    }
    public void setMemorySwap(Long memorySwap) {
        this.memorySwap = memorySwap;
    }

    public Integer getCpuShares() {
        return cpuShares;
    }
    public void setCpuShares(Integer cpuShares) {
        this.cpuShares = cpuShares;
    }

    public String getCpuSet() {
        return cpuSet;
    }
    public void setCpuSet(String cpuSet) {
        this.cpuSet = cpuSet;
    }

    public Boolean getAttachStdin() {
        return attachStdin;
    }
    public void setAttachStdin(Boolean attachStdin) {
        this.attachStdin = attachStdin;
    }

    public Boolean getAttachSterr() {
        return attachSterr;
    }
    public void setAttachSterr(Boolean attachSterr) {
        this.attachSterr = attachSterr;
    }

    public Collection<String> getEnv() {
        return env;
    }
    public void setEnv(Collection<String> env) {
        this.env = env;
    }

    public Collection<String> getCmd() {
        return cmd;
    }
    public void setCmd(Collection<String> cmd) {
        this.cmd = cmd;
    }

    public Collection<String> getDns() {
        return dns;
    }
    public void setDns(Collection<String> dns) {
        this.dns = dns;
    }

    public Collection<String> getVolumes() {
        return volumes;
    }
    public void setVolumes(Collection<String> volumes) {
        this.volumes = volumes;
    }

    public Collection<String> getVolumesFrom() {
        return volumesFrom;
    }
    public void setVolumesFrom(Collection<String> volumesFrom) {
        this.volumesFrom = volumesFrom;
    }

    public Collection<String> getBinds() {
        return binds;
    }
    public void setBinds(Collection<String> binds) {
        this.binds = binds;
    }

    public Collection<Link> getLinks() {
        return links;
    }
    public void setLinks(Collection<Link> links) {
        this.links = links;
    }

    public Boolean getPrivileged() {
        return privileged;
    }
    public void setPrivileged(Boolean privileged) {
        this.privileged = privileged;
    }

    public Boolean getPublishAllPorts() {
        return publishAllPorts;
    }
    public void setPublishAllPorts(Boolean publishAllPorts) {
        this.publishAllPorts = publishAllPorts;
    }

    public String getNetworkMode() {
        return networkMode;
    }
    public void setNetworkMode(String networkMode) {
        this.networkMode = networkMode;
    }

    public Collection<String> getDnsSearch() {
        return dnsSearch;
    }
    public void setDnsSearch(Collection<String> dnsSearch) {
        this.dnsSearch = dnsSearch;
    }

    public Collection<Device> getDevices() {
        return devices;
    }
    public void setDevices(Collection<Device> devices) {
        this.devices = devices;
    }

    public Collection<String> getCapAdd() {
        return capAdd;
    }
    public void setCapAdd(Collection<String> capAdd) {
        this.capAdd = capAdd;
    }

    public Collection<String> getCapDrop() {
        return capDrop;
    }
    public void setCapDrop(Collection<String> capDrop) {
        this.capDrop = capDrop;
    }

    public Collection<String> getExtraHosts() {
        return extraHosts;
    }
    public void setExtraHosts(Collection<String> extraHosts) {
        this.extraHosts = extraHosts;
    }

    public Collection<String> getEntryPoint() {
        return entryPoint;
    }
    public void setEntryPoint(Collection<String> entryPoint) {
        this.entryPoint = entryPoint;
    }

    public String getDomainName() {
        return domainName;
    }
    public void setDomainName(String domainName) {
        this.domainName = domainName;
    }

    public Boolean getAlwaysPull() {
        return alwaysPull;
    }
    public void setAlwaysPull(Boolean alwaysPull) {
        this.alwaysPull = alwaysPull;
    }

    public RestartPolicy getRestartPolicy() {
        return restartPolicy;
    }
    public void setRestartPolicy(RestartPolicy restartPolicy) {
        this.restartPolicy = restartPolicy;
    }

    public Await getAwait() {
        return await;
    }
    public void setAwait(Await await) {
        this.await = await;
    }
    public boolean hasAwait() {
        return this.await != null;
    }

    public String getExtends() {
        return extendsImage;
    }
    public void setExtends(String extendsImage) {
        this.extendsImage = extendsImage;
    }

    public Collection<BeforeStop> getBeforeStop() {
        return beforeStop;
    }
    public void setBeforeStop(Collection<BeforeStop> beforeStop) {
        this.beforeStop = beforeStop;
    }
    public boolean hasBeforeStop() {
        return this.beforeStop != null && !this.beforeStop.isEmpty();
    }

    /**
     * Copies into this container every field value of the given container for which this
     * instance has no value yet ({@code null}). Fields already set here always win.
     *
     * <p>Primitive fields can never be {@code null} and are therefore never inherited;
     * static fields are skipped explicitly so a future {@code serialVersionUID} (or
     * similar constant) cannot be clobbered via reflection.</p>
     *
     * @param container the (parent) definition to inherit unset values from
     * @throws RuntimeException if reflective access fails
     */
    public void merge(CubeContainer container) {
        try {
            Field[] fields = CubeContainer.class.getDeclaredFields();
            for (Field field : fields) {
                if (java.lang.reflect.Modifier.isStatic(field.getModifiers())) {
                    continue;
                }
                if (!field.isAccessible()) {
                    field.setAccessible(true);
                }
                Object thisVal = field.get(this);
                if (thisVal == null) {
                    Object otherVal = field.get(container);
                    field.set(this, otherVal);
                }
            }
        } catch (Exception e) {
            throw new RuntimeException("Could not merge objects", e);
        }
    }
}
| |
package com.chariotsolutions.nfc.plugin;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
// using wildcard imports so we can support Cordova 3.x
import org.apache.cordova.*; // Cordova 3.x
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Activity;
import android.app.PendingIntent;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.IntentFilter.MalformedMimeTypeException;
import android.net.Uri;
import android.nfc.FormatException;
import android.nfc.NdefMessage;
import android.nfc.NdefRecord;
import android.nfc.NfcAdapter;
import android.nfc.NfcEvent;
import android.nfc.Tag;
import android.nfc.TagLostException;
import android.nfc.tech.Ndef;
import android.nfc.tech.NdefFormatable;
import android.os.Parcelable;
import android.util.Log;
public class NfcPlugin extends CordovaPlugin implements NfcAdapter.OnNdefPushCompleteCallback {
// Action names accepted by execute(); each maps to a plugin operation.
private static final String REGISTER_MIME_TYPE = "registerMimeType";
private static final String REMOVE_MIME_TYPE = "removeMimeType";
private static final String REGISTER_NDEF = "registerNdef";
private static final String REMOVE_NDEF = "removeNdef";
private static final String REGISTER_NDEF_FORMATABLE = "registerNdefFormatable";
private static final String REGISTER_DEFAULT_TAG = "registerTag";
private static final String REMOVE_DEFAULT_TAG = "removeTag";
private static final String WRITE_TAG = "writeTag";
private static final String MAKE_READ_ONLY = "makeReadOnly";
private static final String ERASE_TAG = "eraseTag";
private static final String SHARE_TAG = "shareTag";
private static final String UNSHARE_TAG = "unshareTag";
private static final String HANDOVER = "handover"; // Android Beam
private static final String STOP_HANDOVER = "stopHandover";
private static final String ENABLED = "enabled";
private static final String INIT = "init";
private static final String SHOW_SETTINGS = "showSettings";
// Event/tag type identifiers used when dispatching discovered tags to JavaScript.
private static final String NDEF = "ndef";
private static final String NDEF_MIME = "ndef-mime";
private static final String NDEF_FORMATABLE = "ndef-formatable";
private static final String TAG_DEFAULT = "tag";
// NFC adapter status codes reported back to the JavaScript side.
private static final String STATUS_NFC_OK = "NFC_OK";
private static final String STATUS_NO_NFC = "NO_NFC";
private static final String STATUS_NFC_DISABLED = "NFC_DISABLED";
private static final String STATUS_NDEF_PUSH_DISABLED = "NDEF_PUSH_DISABLED";
// Logcat tag.
private static final String TAG = "NfcPlugin";
// Intent filters / tech lists registered for foreground dispatch.
private final List<IntentFilter> intentFilters = new ArrayList<IntentFilter>();
private final ArrayList<String[]> techLists = new ArrayList<String[]>();
// Message shared over peer-to-peer (Android Beam); null when nothing is shared.
private NdefMessage p2pMessage = null;
private PendingIntent pendingIntent = null;
// Last tag intent, kept so it can be re-dispatched after listeners register.
private Intent savedIntent = null;
// Callback contexts for long-lived JavaScript callbacks.
private CallbackContext shareTagCallback;
private CallbackContext handoverCallback;
private CallbackContext handleEventCallback;
@Override
public boolean execute(String action, JSONArray data, CallbackContext callbackContext) throws JSONException {
Log.d(TAG, "execute " + action);
// showSettings can be called if NFC is disabled
// might want to skip this if NO_NFC
if (action.equalsIgnoreCase(SHOW_SETTINGS)) {
showSettings(callbackContext);
return true;
}
if (!getNfcStatus().equals(STATUS_NFC_OK)) {
callbackContext.error(getNfcStatus());
return true; // short circuit
}
createPendingIntent();
if (action.equalsIgnoreCase(REGISTER_MIME_TYPE)) {
registerMimeType(data, callbackContext);
} else if (action.equalsIgnoreCase(REMOVE_MIME_TYPE)) {
removeMimeType(data, callbackContext);
} else if (action.equalsIgnoreCase(REGISTER_NDEF)) {
registerNdef(callbackContext);
} else if (action.equalsIgnoreCase(REMOVE_NDEF)) {
removeNdef(callbackContext);
} else if (action.equalsIgnoreCase(REGISTER_NDEF_FORMATABLE)) {
registerNdefFormatable(callbackContext);
} else if (action.equals(REGISTER_DEFAULT_TAG)) {
registerDefaultTag(callbackContext);
} else if (action.equals(REMOVE_DEFAULT_TAG)) {
removeDefaultTag(callbackContext);
} else if (action.equalsIgnoreCase(WRITE_TAG)) {
writeTag(data, callbackContext);
} else if (action.equalsIgnoreCase(MAKE_READ_ONLY)) {
makeReadOnly(callbackContext);
} else if (action.equalsIgnoreCase(ERASE_TAG)) {
eraseTag(callbackContext);
} else if (action.equalsIgnoreCase(SHARE_TAG)) {
shareTag(data, callbackContext);
} else if (action.equalsIgnoreCase(UNSHARE_TAG)) {
unshareTag(callbackContext);
} else if (action.equalsIgnoreCase(HANDOVER)) {
handover(data, callbackContext);
} else if (action.equalsIgnoreCase(STOP_HANDOVER)) {
stopHandover(callbackContext);
} else if (action.equalsIgnoreCase(INIT)) {
init(callbackContext);
} else if (action.equalsIgnoreCase(ENABLED)) {
// status is checked before every call
// if code made it here, NFC is enabled
callbackContext.success(STATUS_NFC_OK);
} else {
// invalid action
return false;
}
return true;
}
private String getNfcStatus() {
NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(getActivity());
if (nfcAdapter == null) {
return STATUS_NO_NFC;
} else if (!nfcAdapter.isEnabled()) {
return STATUS_NFC_DISABLED;
} else {
return STATUS_NFC_OK;
}
}
private void registerDefaultTag(CallbackContext callbackContext) {
addTagFilter();
callbackContext.success();
}
private void removeDefaultTag(CallbackContext callbackContext) {
removeTagFilter();
callbackContext.success();
}
private void registerNdefFormatable(CallbackContext callbackContext) {
addTechList(new String[]{NdefFormatable.class.getName()});
callbackContext.success();
}
private void registerNdef(CallbackContext callbackContext) {
addTechList(new String[]{Ndef.class.getName()});
callbackContext.success();
}
private void removeNdef(CallbackContext callbackContext) {
removeTechList(new String[]{Ndef.class.getName()});
callbackContext.success();
}
private void unshareTag(CallbackContext callbackContext) {
p2pMessage = null;
stopNdefPush();
shareTagCallback = null;
callbackContext.success();
}
private void init(CallbackContext callbackContext) {
Log.d(TAG, "Enabling plugin " + getIntent());
handleEventCallback = callbackContext;
startNfc();
if (!recycledIntent()) {
parseMessage();
}
JSONObject jsonMessage = new JSONObject();
try
{
jsonMessage.put("class","log");
jsonMessage.put("message","Initialized the NfcPlugin");
}
catch (JSONException e)
{
e.printStackTrace();
}
sendData(jsonMessage);
}
private void removeMimeType(JSONArray data, CallbackContext callbackContext) throws JSONException {
String mimeType = "";
try {
mimeType = data.getString(0);
/*boolean removed =*/ removeIntentFilter(mimeType);
callbackContext.success();
} catch (MalformedMimeTypeException e) {
callbackContext.error("Invalid MIME Type " + mimeType);
}
}
private void registerMimeType(JSONArray data, CallbackContext callbackContext) throws JSONException {
String mimeType = "";
try {
mimeType = data.getString(0);
intentFilters.add(createIntentFilter(mimeType));
callbackContext.success();
} catch (MalformedMimeTypeException e) {
callbackContext.error("Invalid MIME Type " + mimeType);
}
}
// Cheating and writing an empty record. We may actually be able to erase some tag types.
private void eraseTag(CallbackContext callbackContext) throws JSONException {
Tag tag = savedIntent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
NdefRecord[] records = {
new NdefRecord(NdefRecord.TNF_EMPTY, new byte[0], new byte[0], new byte[0])
};
writeNdefMessage(new NdefMessage(records), tag, callbackContext);
}
private void writeTag(JSONArray data, CallbackContext callbackContext) throws JSONException {
if (getIntent() == null) { // TODO remove this and handle LostTag
callbackContext.error("Failed to write tag, received null intent");
}
Tag tag = savedIntent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
NdefRecord[] records = Util.jsonToNdefRecords(data.getString(0));
writeNdefMessage(new NdefMessage(records), tag, callbackContext);
}
private void writeNdefMessage(final NdefMessage message, final Tag tag, final CallbackContext callbackContext) {
cordova.getThreadPool().execute(new Runnable() {
@Override
public void run() {
try {
Ndef ndef = Ndef.get(tag);
if (ndef != null) {
ndef.connect();
if (ndef.isWritable()) {
int size = message.toByteArray().length;
if (ndef.getMaxSize() < size) {
callbackContext.error("Tag capacity is " + ndef.getMaxSize() +
" bytes, message is " + size + " bytes.");
} else {
ndef.writeNdefMessage(message);
callbackContext.success();
}
} else {
callbackContext.error("Tag is read only");
}
ndef.close();
} else {
NdefFormatable formatable = NdefFormatable.get(tag);
if (formatable != null) {
formatable.connect();
formatable.format(message);
callbackContext.success();
formatable.close();
} else {
callbackContext.error("Tag doesn't support NDEF");
}
}
} catch (FormatException e) {
callbackContext.error(e.getMessage());
} catch (TagLostException e) {
callbackContext.error(e.getMessage());
} catch (IOException e) {
callbackContext.error(e.getMessage());
}
}
});
}
private void makeReadOnly(final CallbackContext callbackContext) throws JSONException {
if (getIntent() == null) { // Lost Tag
callbackContext.error("Failed to make tag read only, received null intent");
return;
}
final Tag tag = savedIntent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
if (tag == null) {
callbackContext.error("Failed to make tag read only, tag is null");
return;
}
cordova.getThreadPool().execute(new Runnable() {
@Override
public void run() {
boolean success = false;
String message = "Could not make tag read only";
Ndef ndef = Ndef.get(tag);
try {
if (ndef != null) {
ndef.connect();
if (!ndef.isWritable()) {
message = "Tag is not writable";
} else if (ndef.canMakeReadOnly()) {
success = ndef.makeReadOnly();
} else {
message = "Tag can not be made read only";
}
} else {
message = "Tag is not NDEF";
}
} catch (IOException e) {
Log.e(TAG, "Failed to make tag read only", e);
if (e.getMessage() != null) {
message = e.getMessage();
} else {
message = e.toString();
}
}
if (success) {
callbackContext.success();
} else {
callbackContext.error(message);
}
}
});
}
private void shareTag(JSONArray data, CallbackContext callbackContext) throws JSONException {
NdefRecord[] records = Util.jsonToNdefRecords(data.getString(0));
this.p2pMessage = new NdefMessage(records);
startNdefPush(callbackContext);
}
// setBeamPushUris
// Every Uri you provide must have either scheme 'file' or scheme 'content'.
// Note that this takes priority over setNdefPush
//
// See http://developer.android.com/reference/android/nfc/NfcAdapter.html#setBeamPushUris(android.net.Uri[],%20android.app.Activity)
private void handover(JSONArray data, CallbackContext callbackContext) throws JSONException {
Uri[] uri = new Uri[data.length()];
for (int i = 0; i < data.length(); i++) {
uri[i] = Uri.parse(data.getString(i));
}
startNdefBeam(callbackContext, uri);
}
private void stopHandover(CallbackContext callbackContext) throws JSONException {
stopNdefBeam();
handoverCallback = null;
callbackContext.success();
}
private void showSettings(CallbackContext callbackContext) {
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN) {
Intent intent = new Intent(android.provider.Settings.ACTION_NFC_SETTINGS);
getActivity().startActivity(intent);
} else {
Intent intent = new Intent(android.provider.Settings.ACTION_WIRELESS_SETTINGS);
getActivity().startActivity(intent);
}
callbackContext.success();
}
private void createPendingIntent() {
if (pendingIntent == null) {
Activity activity = getActivity();
Intent intent = new Intent(activity, activity.getClass());
intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP);
pendingIntent = PendingIntent.getActivity(activity, 0, intent, 0);
}
}
private void addTechList(String[] list) {
this.addTechFilter();
this.addToTechList(list);
}
private void removeTechList(String[] list) {
this.removeTechFilter();
this.removeFromTechList(list);
}
private void addTechFilter() {
intentFilters.add(new IntentFilter(NfcAdapter.ACTION_TECH_DISCOVERED));
}
private boolean removeTechFilter() {
boolean removed = false;
Iterator<IntentFilter> iter = intentFilters.iterator();
while (iter.hasNext()) {
IntentFilter intentFilter = iter.next();
if (NfcAdapter.ACTION_TECH_DISCOVERED.equals(intentFilter.getAction(0))) {
iter.remove();
removed = true;
}
}
return removed;
}
private void addTagFilter() {
intentFilters.add(new IntentFilter(NfcAdapter.ACTION_TAG_DISCOVERED));
}
private boolean removeTagFilter() {
boolean removed = false;
Iterator<IntentFilter> iter = intentFilters.iterator();
while (iter.hasNext()) {
IntentFilter intentFilter = iter.next();
if (NfcAdapter.ACTION_TAG_DISCOVERED.equals(intentFilter.getAction(0))) {
iter.remove();
removed = true;
}
}
return removed;
}
private void startNfc() {
createPendingIntent(); // onResume can call startNfc before execute
getActivity().runOnUiThread(new Runnable() {
public void run() {
NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(getActivity());
if (nfcAdapter != null && !getActivity().isFinishing()) {
try {
nfcAdapter.enableForegroundDispatch(getActivity(), getPendingIntent(), getIntentFilters(), getTechLists());
if (p2pMessage != null) {
nfcAdapter.setNdefPushMessage(p2pMessage, getActivity());
}
} catch (IllegalStateException e) {
// issue 110 - user exits app with home button while nfc is initializing
Log.w(TAG, "Illegal State Exception starting NFC. Assuming application is terminating.");
}
}
}
});
}
private void stopNfc() {
Log.d(TAG, "stopNfc");
getActivity().runOnUiThread(new Runnable() {
public void run() {
NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(getActivity());
if (nfcAdapter != null) {
try {
nfcAdapter.disableForegroundDispatch(getActivity());
} catch (IllegalStateException e) {
// issue 125 - user exits app with back button while nfc
Log.w(TAG, "Illegal State Exception stopping NFC. Assuming application is terminating.");
}
}
}
});
}
private void startNdefBeam(final CallbackContext callbackContext, final Uri[] uris) {
getActivity().runOnUiThread(new Runnable() {
public void run() {
NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(getActivity());
if (nfcAdapter == null) {
callbackContext.error(STATUS_NO_NFC);
} else if (!nfcAdapter.isNdefPushEnabled()) {
callbackContext.error(STATUS_NDEF_PUSH_DISABLED);
} else {
nfcAdapter.setOnNdefPushCompleteCallback(NfcPlugin.this, getActivity());
try {
nfcAdapter.setBeamPushUris(uris, getActivity());
PluginResult result = new PluginResult(PluginResult.Status.NO_RESULT);
result.setKeepCallback(true);
handoverCallback = callbackContext;
callbackContext.sendPluginResult(result);
} catch (IllegalArgumentException e) {
callbackContext.error(e.getMessage());
}
}
}
});
}
private void startNdefPush(final CallbackContext callbackContext) {
getActivity().runOnUiThread(new Runnable() {
public void run() {
NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(getActivity());
if (nfcAdapter == null) {
callbackContext.error(STATUS_NO_NFC);
} else if (!nfcAdapter.isNdefPushEnabled()) {
callbackContext.error(STATUS_NDEF_PUSH_DISABLED);
} else {
nfcAdapter.setNdefPushMessage(p2pMessage, getActivity());
nfcAdapter.setOnNdefPushCompleteCallback(NfcPlugin.this, getActivity());
PluginResult result = new PluginResult(PluginResult.Status.NO_RESULT);
result.setKeepCallback(true);
shareTagCallback = callbackContext;
callbackContext.sendPluginResult(result);
}
}
});
}
private void stopNdefPush() {
getActivity().runOnUiThread(new Runnable() {
public void run() {
NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(getActivity());
if (nfcAdapter != null) {
nfcAdapter.setNdefPushMessage(null, getActivity());
}
}
});
}
private void stopNdefBeam() {
getActivity().runOnUiThread(new Runnable() {
public void run() {
NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(getActivity());
if (nfcAdapter != null) {
nfcAdapter.setBeamPushUris(null, getActivity());
}
}
});
}
private void addToTechList(String[] techs) {
techLists.add(techs);
}
private void removeFromTechList(String[] techs) {
techLists.remove(techs);
}
private boolean removeIntentFilter(String mimeType) throws MalformedMimeTypeException {
boolean removed = false;
Iterator<IntentFilter> iter = intentFilters.iterator();
while (iter.hasNext()) {
IntentFilter intentFilter = iter.next();
String mt = intentFilter.getDataType(0);
if (mimeType.equals(mt)) {
iter.remove();
removed = true;
}
}
return removed;
}
private IntentFilter createIntentFilter(String mimeType) throws MalformedMimeTypeException {
IntentFilter intentFilter = new IntentFilter(NfcAdapter.ACTION_NDEF_DISCOVERED);
intentFilter.addDataType(mimeType);
return intentFilter;
}
private PendingIntent getPendingIntent() {
return pendingIntent;
}
private IntentFilter[] getIntentFilters() {
return intentFilters.toArray(new IntentFilter[intentFilters.size()]);
}
private String[][] getTechLists() {
//noinspection ToArrayCallWithZeroLengthArrayArgument
return techLists.toArray(new String[0][0]);
}
void parseMessage() {
cordova.getThreadPool().execute(new Runnable() {
@Override
public void run() {
Log.d(TAG, "parseMessage " + getIntent());
Intent intent = getIntent();
String action = intent.getAction();
Log.d(TAG, "action " + action);
if (action == null) {
return;
}
Tag tag = intent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
Parcelable[] messages = intent.getParcelableArrayExtra((NfcAdapter.EXTRA_NDEF_MESSAGES));
if (action.equals(NfcAdapter.ACTION_NDEF_DISCOVERED)) {
Ndef ndef = Ndef.get(tag);
fireNdefEvent(NDEF_MIME, ndef, messages);
} else if (action.equals(NfcAdapter.ACTION_TECH_DISCOVERED)) {
for (String tagTech : tag.getTechList()) {
Log.d(TAG, tagTech);
if (tagTech.equals(NdefFormatable.class.getName())) {
fireNdefFormatableEvent(tag);
} else if (tagTech.equals(Ndef.class.getName())) { //
Ndef ndef = Ndef.get(tag);
fireNdefEvent(NDEF, ndef, messages);
}
}
}
if (action.equals(NfcAdapter.ACTION_TAG_DISCOVERED)) {
fireTagEvent(tag);
}
setIntent(new Intent());
}
});
}
private void fireNdefEvent(String type, Ndef ndef, Parcelable[] messages) {
sendEventData(type, buildNdefJSON(ndef, messages));
}
private void fireNdefFormatableEvent (Tag tag) {
sendEventData(NDEF_FORMATABLE, tag);
}
private void fireTagEvent (Tag tag) {
sendEventData(TAG_DEFAULT, tag);
}
JSONObject buildNdefJSON(Ndef ndef, Parcelable[] messages) {
JSONObject json = Util.ndefToJSON(ndef);
// ndef is null for peer-to-peer
// ndef and messages are null for ndef format-able
if (ndef == null && messages != null) {
try {
if (messages.length > 0) {
NdefMessage message = (NdefMessage) messages[0];
json.put("ndefMessage", Util.messageToJSON(message));
// guessing type, would prefer a more definitive way to determine type
json.put("type", "NDEF Push Protocol");
}
if (messages.length > 1) {
Log.wtf(TAG, "Expected one ndefMessage but found " + messages.length);
}
} catch (JSONException e) {
// shouldn't happen
Log.e(Util.TAG, "Failed to convert ndefMessage into json", e);
}
}
return json;
}
private boolean recycledIntent() { // TODO this is a kludge, find real solution
int flags = getIntent().getFlags();
if ((flags & Intent.FLAG_ACTIVITY_LAUNCHED_FROM_HISTORY) == Intent.FLAG_ACTIVITY_LAUNCHED_FROM_HISTORY) {
Log.i(TAG, "Launched from history, killing recycled intent");
setIntent(new Intent());
return true;
}
return false;
}
@Override
public void onPause(boolean multitasking) {
Log.d(TAG, "onPause " + getIntent());
super.onPause(multitasking);
if (multitasking) {
// nfc can't run in background
stopNfc();
}
}
@Override
public void onResume(boolean multitasking) {
Log.d(TAG, "onResume " + getIntent());
super.onResume(multitasking);
startNfc();
}
@Override
public void onNewIntent(Intent intent) {
Log.d(TAG, "onNewIntent " + intent);
super.onNewIntent(intent);
setIntent(intent);
savedIntent = intent;
parseMessage();
}
private Activity getActivity() {
return this.cordova.getActivity();
}
private Intent getIntent() {
return getActivity().getIntent();
}
private void setIntent(Intent intent) {
getActivity().setIntent(intent);
}
@Override
public void onNdefPushComplete(NfcEvent event) {
// handover (beam) take precedence over share tag (ndef push)
if (handoverCallback != null) {
PluginResult result = new PluginResult(PluginResult.Status.OK, "Beamed Message to Peer");
result.setKeepCallback(true);
handoverCallback.sendPluginResult(result);
} else if (shareTagCallback != null) {
PluginResult result = new PluginResult(PluginResult.Status.OK, "Shared Message with Peer");
result.setKeepCallback(true);
shareTagCallback.sendPluginResult(result);
}
}
//CordovaWebView.sendJavascript is deprecated and no longer works in Cordova 5 with pre KitKat devices.
private void sendEventData(final String type, Tag tag) {
sendEventData(type, Util.tagToJSON(tag));
}
private void sendEventData(final String type, JSONObject data) {
JSONObject jsonMessage = new JSONObject();
try
{
jsonMessage.put("class","event");
jsonMessage.put("type",type);
jsonMessage.put("data", data);
}
catch (JSONException e)
{
e.printStackTrace();
}
sendData(jsonMessage);
}
private void sendData(JSONObject data) {
String message = data.toString();
Log.v(TAG, message);
PluginResult dataResult = new PluginResult(PluginResult.Status.OK, message);
dataResult.setKeepCallback(true);
handleEventCallback.sendPluginResult(dataResult);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.parquet.filter2.statisticslevel;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import org.apache.parquet.io.api.Binary;
import org.junit.Test;
import org.apache.parquet.column.Encoding;
import org.apache.parquet.column.statistics.DoubleStatistics;
import org.apache.parquet.column.statistics.IntStatistics;
import org.apache.parquet.hadoop.metadata.ColumnPath;
import org.apache.parquet.filter2.predicate.FilterPredicate;
import org.apache.parquet.filter2.predicate.LogicalInverseRewriter;
import org.apache.parquet.filter2.predicate.Operators.BinaryColumn;
import org.apache.parquet.filter2.predicate.Operators.DoubleColumn;
import org.apache.parquet.filter2.predicate.Operators.IntColumn;
import org.apache.parquet.filter2.predicate.Statistics;
import org.apache.parquet.filter2.predicate.UserDefinedPredicate;
import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import static org.apache.parquet.filter2.predicate.FilterApi.binaryColumn;
import static org.apache.parquet.io.api.Binary.fromString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.apache.parquet.filter2.predicate.FilterApi.and;
import static org.apache.parquet.filter2.predicate.FilterApi.doubleColumn;
import static org.apache.parquet.filter2.predicate.FilterApi.eq;
import static org.apache.parquet.filter2.predicate.FilterApi.gt;
import static org.apache.parquet.filter2.predicate.FilterApi.gtEq;
import static org.apache.parquet.filter2.predicate.FilterApi.intColumn;
import static org.apache.parquet.filter2.predicate.FilterApi.lt;
import static org.apache.parquet.filter2.predicate.FilterApi.ltEq;
import static org.apache.parquet.filter2.predicate.FilterApi.not;
import static org.apache.parquet.filter2.predicate.FilterApi.notEq;
import static org.apache.parquet.filter2.predicate.FilterApi.or;
import static org.apache.parquet.filter2.predicate.FilterApi.userDefined;
import static org.apache.parquet.filter2.statisticslevel.StatisticsFilter.canDrop;
public class TestStatisticsFilter {
private static ColumnChunkMetaData getIntColumnMeta(IntStatistics stats, long valueCount) {
return ColumnChunkMetaData.get(ColumnPath.get("int", "column"),
PrimitiveTypeName.INT32,
CompressionCodecName.GZIP,
new HashSet<Encoding>(Arrays.asList(Encoding.PLAIN)),
stats,
0L, 0L, valueCount, 0L, 0L);
}
private static ColumnChunkMetaData getDoubleColumnMeta(DoubleStatistics stats, long valueCount) {
return ColumnChunkMetaData.get(ColumnPath.get("double", "column"),
PrimitiveTypeName.DOUBLE,
CompressionCodecName.GZIP,
new HashSet<Encoding>(Arrays.asList(Encoding.PLAIN)),
stats,
0L, 0L, valueCount, 0L, 0L);
}
private static final IntColumn intColumn = intColumn("int.column");
private static final DoubleColumn doubleColumn = doubleColumn("double.column");
private static final BinaryColumn missingColumn = binaryColumn("missing");
private static final IntColumn missingColumn2 = intColumn("missing.int");
private static final IntStatistics intStats = new IntStatistics();
private static final IntStatistics nullIntStats = new IntStatistics();
private static final DoubleStatistics doubleStats = new DoubleStatistics();
static {
intStats.setMinMax(10, 100);
doubleStats.setMinMax(10, 100);
nullIntStats.setMinMax(0, 0);
nullIntStats.setNumNulls(177);
}
private static final List<ColumnChunkMetaData> columnMetas = Arrays.asList(
getIntColumnMeta(intStats, 177L),
getDoubleColumnMeta(doubleStats, 177L));
private static final List<ColumnChunkMetaData> nullColumnMetas = Arrays.asList(
getIntColumnMeta(nullIntStats, 177L), // column of all nulls
getDoubleColumnMeta(doubleStats, 177L));
@Test
public void testEqNonNull() {
assertTrue(canDrop(eq(intColumn, 9), columnMetas));
assertFalse(canDrop(eq(intColumn, 10), columnMetas));
assertFalse(canDrop(eq(intColumn, 100), columnMetas));
assertTrue(canDrop(eq(intColumn, 101), columnMetas));
// drop columns of all nulls when looking for non-null value
assertTrue(canDrop(eq(intColumn, 0), nullColumnMetas));
assertTrue(canDrop(eq(missingColumn, fromString("any")), columnMetas));
}
@Test
public void testEqNull() {
IntStatistics statsNoNulls = new IntStatistics();
statsNoNulls.setMinMax(10, 100);
statsNoNulls.setNumNulls(0);
IntStatistics statsSomeNulls = new IntStatistics();
statsSomeNulls.setMinMax(10, 100);
statsSomeNulls.setNumNulls(3);
assertTrue(canDrop(eq(intColumn, null), Arrays.asList(
getIntColumnMeta(statsNoNulls, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(eq(intColumn, null), Arrays.asList(
getIntColumnMeta(statsSomeNulls, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(eq(missingColumn, null), columnMetas));
}
@Test
public void testNotEqNonNull() {
assertFalse(canDrop(notEq(intColumn, 9), columnMetas));
assertFalse(canDrop(notEq(intColumn, 10), columnMetas));
assertFalse(canDrop(notEq(intColumn, 100), columnMetas));
assertFalse(canDrop(notEq(intColumn, 101), columnMetas));
IntStatistics allSevens = new IntStatistics();
allSevens.setMinMax(7, 7);
assertTrue(canDrop(notEq(intColumn, 7), Arrays.asList(
getIntColumnMeta(allSevens, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
allSevens.setNumNulls(100L);
assertFalse(canDrop(notEq(intColumn, 7), Arrays.asList(
getIntColumnMeta(allSevens, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
allSevens.setNumNulls(177L);
assertFalse(canDrop(notEq(intColumn, 7), Arrays.asList(
getIntColumnMeta(allSevens, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(notEq(missingColumn, fromString("any")), columnMetas));
}
@Test
public void testNotEqNull() {
IntStatistics statsNoNulls = new IntStatistics();
statsNoNulls.setMinMax(10, 100);
statsNoNulls.setNumNulls(0);
IntStatistics statsSomeNulls = new IntStatistics();
statsSomeNulls.setMinMax(10, 100);
statsSomeNulls.setNumNulls(3);
IntStatistics statsAllNulls = new IntStatistics();
statsAllNulls.setMinMax(0, 0);
statsAllNulls.setNumNulls(177);
assertFalse(canDrop(notEq(intColumn, null), Arrays.asList(
getIntColumnMeta(statsNoNulls, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(notEq(intColumn, null), Arrays.asList(
getIntColumnMeta(statsSomeNulls, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(notEq(intColumn, null), Arrays.asList(
getIntColumnMeta(statsAllNulls, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(notEq(missingColumn, null), columnMetas));
}
@Test
public void testLt() {
assertTrue(canDrop(lt(intColumn, 9), columnMetas));
assertTrue(canDrop(lt(intColumn, 10), columnMetas));
assertFalse(canDrop(lt(intColumn, 100), columnMetas));
assertFalse(canDrop(lt(intColumn, 101), columnMetas));
assertTrue(canDrop(lt(intColumn, 0), nullColumnMetas));
assertTrue(canDrop(lt(intColumn, 7), nullColumnMetas));
assertTrue(canDrop(lt(missingColumn, fromString("any")), columnMetas));
}
@Test
public void testLtEq() {
assertTrue(canDrop(ltEq(intColumn, 9), columnMetas));
assertFalse(canDrop(ltEq(intColumn, 10), columnMetas));
assertFalse(canDrop(ltEq(intColumn, 100), columnMetas));
assertFalse(canDrop(ltEq(intColumn, 101), columnMetas));
assertTrue(canDrop(ltEq(intColumn, 0), nullColumnMetas));
assertTrue(canDrop(ltEq(intColumn, 7), nullColumnMetas));
assertTrue(canDrop(ltEq(missingColumn, fromString("any")), columnMetas));
}
@Test
public void testGt() {
assertFalse(canDrop(gt(intColumn, 9), columnMetas));
assertFalse(canDrop(gt(intColumn, 10), columnMetas));
assertTrue(canDrop(gt(intColumn, 100), columnMetas));
assertTrue(canDrop(gt(intColumn, 101), columnMetas));
assertTrue(canDrop(gt(intColumn, 0), nullColumnMetas));
assertTrue(canDrop(gt(intColumn, 7), nullColumnMetas));
assertTrue(canDrop(gt(missingColumn, fromString("any")), columnMetas));
}
@Test
public void testGtEq() {
assertFalse(canDrop(gtEq(intColumn, 9), columnMetas));
assertFalse(canDrop(gtEq(intColumn, 10), columnMetas));
assertFalse(canDrop(gtEq(intColumn, 100), columnMetas));
assertTrue(canDrop(gtEq(intColumn, 101), columnMetas));
assertTrue(canDrop(gtEq(intColumn, 0), nullColumnMetas));
assertTrue(canDrop(gtEq(intColumn, 7), nullColumnMetas));
assertTrue(canDrop(gtEq(missingColumn, fromString("any")), columnMetas));
}
@Test
public void testAnd() {
FilterPredicate yes = eq(intColumn, 9);
FilterPredicate no = eq(doubleColumn, 50D);
assertTrue(canDrop(and(yes, yes), columnMetas));
assertTrue(canDrop(and(yes, no), columnMetas));
assertTrue(canDrop(and(no, yes), columnMetas));
assertFalse(canDrop(and(no, no), columnMetas));
}
@Test
public void testOr() {
FilterPredicate yes = eq(intColumn, 9);
FilterPredicate no = eq(doubleColumn, 50D);
assertTrue(canDrop(or(yes, yes), columnMetas));
assertFalse(canDrop(or(yes, no), columnMetas));
assertFalse(canDrop(or(no, yes), columnMetas));
assertFalse(canDrop(or(no, no), columnMetas));
}
public static class SevensAndEightsUdp extends UserDefinedPredicate<Integer> {
@Override
public boolean keep(Integer value) {
if (value == null) {
return true;
}
throw new RuntimeException("this method should not be called with value != null");
}
@Override
public boolean canDrop(Statistics<Integer> statistics) {
return statistics.getMin() == 7 && statistics.getMax() == 7;
}
@Override
public boolean inverseCanDrop(Statistics<Integer> statistics) {
return statistics.getMin() == 8 && statistics.getMax() == 8;
}
}
public static class DropNullUdp extends SevensAndEightsUdp {
@Override
public boolean keep(Integer value) {
if (value == null) {
return false;
}
throw new RuntimeException("this method should not be called with value != null");
}
}
@Test
public void testUdp() {
FilterPredicate pred = userDefined(intColumn, SevensAndEightsUdp.class);
FilterPredicate invPred = LogicalInverseRewriter.rewrite(not(userDefined(intColumn, SevensAndEightsUdp.class)));
FilterPredicate udpDropMissingColumn = userDefined(missingColumn2, DropNullUdp.class);
FilterPredicate invUdpDropMissingColumn = LogicalInverseRewriter.rewrite(not(userDefined(missingColumn2, DropNullUdp.class)));
FilterPredicate udpKeepMissingColumn = userDefined(missingColumn2, SevensAndEightsUdp.class);
FilterPredicate invUdpKeepMissingColumn = LogicalInverseRewriter.rewrite(not(userDefined(missingColumn2, SevensAndEightsUdp.class)));
IntStatistics seven = new IntStatistics();
seven.setMinMax(7, 7);
IntStatistics eight = new IntStatistics();
eight.setMinMax(8, 8);
IntStatistics neither = new IntStatistics();
neither.setMinMax(1 , 2);
assertTrue(canDrop(pred, Arrays.asList(
getIntColumnMeta(seven, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(pred, Arrays.asList(
getIntColumnMeta(eight, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(pred, Arrays.asList(
getIntColumnMeta(neither, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(invPred, Arrays.asList(
getIntColumnMeta(seven, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(invPred, Arrays.asList(
getIntColumnMeta(eight, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(invPred, Arrays.asList(
getIntColumnMeta(neither, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
// udpDropMissingColumn drops null column.
assertTrue(canDrop(udpDropMissingColumn, Arrays.asList(
getIntColumnMeta(seven, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(udpDropMissingColumn, Arrays.asList(
getIntColumnMeta(eight, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(udpDropMissingColumn, Arrays.asList(
getIntColumnMeta(neither, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
// invUdpDropMissingColumn (i.e., not(udpDropMissingColumn)) keeps null column.
assertFalse(canDrop(invUdpDropMissingColumn, Arrays.asList(
getIntColumnMeta(seven, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(invUdpDropMissingColumn, Arrays.asList(
getIntColumnMeta(eight, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(invUdpDropMissingColumn, Arrays.asList(
getIntColumnMeta(neither, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
// udpKeepMissingColumn keeps null column.
assertFalse(canDrop(udpKeepMissingColumn, Arrays.asList(
getIntColumnMeta(seven, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(udpKeepMissingColumn, Arrays.asList(
getIntColumnMeta(eight, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(udpKeepMissingColumn, Arrays.asList(
getIntColumnMeta(neither, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
// invUdpKeepMissingColumn (i.e., not(udpKeepMissingColumn)) drops null column.
assertTrue(canDrop(invUdpKeepMissingColumn, Arrays.asList(
getIntColumnMeta(seven, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(invUdpKeepMissingColumn, Arrays.asList(
getIntColumnMeta(eight, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(invUdpKeepMissingColumn, Arrays.asList(
getIntColumnMeta(neither, 177L),
getDoubleColumnMeta(doubleStats, 177L))));
}
@Test
public void testClearExceptionForNots() {
    // A predicate still containing a raw not() must be rejected with a clear,
    // actionable message pointing at LogicalInverseRewriter.
    FilterPredicate unrewritten = and(not(eq(doubleColumn, 12.0)), eq(intColumn, 17));
    List<ColumnChunkMetaData> metadata = Arrays.asList(
        getDoubleColumnMeta(new DoubleStatistics(), 0L),
        getIntColumnMeta(new IntStatistics(), 0L));
    try {
        canDrop(unrewritten, metadata);
        fail("This should throw");
    } catch (IllegalArgumentException e) {
        assertEquals("This predicate contains a not! Did you forget to run this predicate through LogicalInverseRewriter?"
            + " not(eq(double.column, 12.0))", e.getMessage());
    }
}
}
| |
package org.ethereum.jsonrpc;
import org.ethereum.config.SystemProperties;
import org.ethereum.core.*;
import org.ethereum.crypto.ECKey;
import org.ethereum.crypto.HashUtil;
import org.ethereum.crypto.SHA3Helper;
import org.ethereum.db.ByteArrayWrapper;
import org.ethereum.core.TransactionInfo;
import org.ethereum.db.TransactionStore;
import org.ethereum.facade.Ethereum;
import org.ethereum.listener.CompositeEthereumListener;
import org.ethereum.listener.EthereumListenerAdapter;
import org.ethereum.manager.WorldManager;
import org.ethereum.mine.BlockMiner;
import org.ethereum.net.client.Capability;
import org.ethereum.net.client.ConfigCapabilities;
import org.ethereum.net.rlpx.Node;
import org.ethereum.net.server.ChannelManager;
import org.ethereum.net.server.PeerServer;
import org.ethereum.solidity.compiler.SolidityCompiler;
import org.ethereum.sync.SyncManager;
import org.ethereum.sync.listener.CompositeSyncListener;
import org.ethereum.sync.listener.SyncListenerAdapter;
import org.ethereum.util.ByteUtil;
import org.ethereum.util.RLP;
import org.ethereum.vm.DataWord;
import org.ethereum.vm.LogInfo;
import org.ethereum.vm.program.ProgramResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.math.BigInteger;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import static java.lang.Math.max;
import static org.ethereum.jsonrpc.TypeConverter.*;
import static org.ethereum.jsonrpc.TypeConverter.StringHexToByteArray;
import static org.ethereum.util.ByteUtil.EMPTY_BYTE_ARRAY;
import static org.ethereum.util.ByteUtil.bigIntegerToBytes;
/**
* Created by Anton Nashatyrev on 25.11.2015.
*/
@Component
public class JsonRpcImpl implements JsonRpc {
private static final Logger logger = LoggerFactory.getLogger("jsonrpc");
/**
 * Binary (decoded) form of the JSON-RPC {@link CallArguments}: "0x"-hex string
 * fields parsed into primitives, with defaults applied for absent/empty values.
 */
public class BinaryCallArguments {
public long nonce;
public long gasPrice;
public long gasLimit;
public String toAddress;
public long value;
public byte[] data;
/**
 * Decodes the given call arguments into this object's fields. Absent or
 * empty fields default to: nonce 0, gasPrice 0, gasLimit 4,000,000, value 0,
 * data null.
 * @throws Exception if any present field is not valid "0x"-prefixed hex
 */
public void setArguments(CallArguments args) throws Exception {
nonce =0;
gasPrice = 0;
if (args.gasPrice != null && args.gasPrice.length()!=0)
gasPrice = JSonHexToLong(args.gasPrice);
// Generous default gas limit for local calls when the caller supplies none.
gasLimit = 4_000_000;
if (args.gasLimit != null && args.gasLimit.length()!=0)
gasLimit = JSonHexToLong(args.gasLimit);
toAddress = JSonHexToHex(args.to);
value=0;
if (args.value != null && args.value.length()!=0)
value = JSonHexToLong(args.value);
data = null;
if (args.data != null && args.data.length()!=0)
data = TypeConverter.StringHexToByteArray(args.data);
}
}
@Autowired
SystemProperties config;
@Autowired
ConfigCapabilities configCapabilities;
@Autowired
public WorldManager worldManager;
@Autowired
public Repository repository;
@Autowired
BlockchainImpl blockchain;
@Autowired
Ethereum eth;
@Autowired
PeerServer peerServer;
@Autowired
SyncManager syncManager;
@Autowired
TransactionStore txStore;
@Autowired
ChannelManager channelManager;
@Autowired
CompositeSyncListener compositeSyncListener;
@Autowired
CompositeEthereumListener compositeEthereumListener;
@Autowired
BlockMiner blockMiner;
// NOTE(review): transactionStore duplicates txStore above — both are injected
// TransactionStore references; confirm whether one can be removed.
@Autowired
TransactionStore transactionStore;
@Autowired
PendingStateImpl pendingState;
// Best block number when this node started; reported as eth_syncing "startingBlock".
long initialBlockNumber;
// Highest block number seen from peers; reported as eth_syncing "highestBlock".
long maxBlockNumberSeen;
// Locally managed (unlocked) accounts, keyed by wrapped address bytes.
Map<ByteArrayWrapper, Account> accounts = new HashMap<>();
@PostConstruct
private void init() {
// Remember the chain head at startup for eth_syncing's "startingBlock".
initialBlockNumber = blockchain.getBestBlock().getNumber();
// Track the highest block number announced during sync ("highestBlock" in eth_syncing).
compositeSyncListener.add(new SyncListenerAdapter() {
@Override
public void onNewBlockNumber(long number) {
maxBlockNumberSeen = max(maxBlockNumberSeen, number);
}
});
// Fan out new blocks and pending transactions to every installed RPC filter.
compositeEthereumListener.addListener(new EthereumListenerAdapter() {
@Override
public void onBlock(Block block, List<TransactionReceipt> receipts) {
for (Filter filter : installedFilters.values()) {
filter.newBlockReceived(block);
}
}
@Override
public void onPendingTransactionsReceived(List<Transaction> transactions) {
for (Filter filter : installedFilters.values()) {
for (Transaction tx : transactions) {
filter.newPendingTx(tx);
}
}
}
});
}
/**
 * Parses a "0x"-prefixed hex string into a signed long.
 * @throws Exception when the "0x" prefix is missing; NumberFormatException on bad digits
 */
public long JSonHexToLong(String x) throws Exception {
    if (!x.startsWith("0x"))
        throw new Exception("Incorrect hex syntax");
    return Long.parseLong(x.substring(2), 16);
}

/**
 * Parses a "0x"-prefixed hex string into a signed int.
 * @throws Exception when the "0x" prefix is missing; NumberFormatException on bad digits
 */
public int JSonHexToInt(String x) throws Exception {
    if (!x.startsWith("0x"))
        throw new Exception("Incorrect hex syntax");
    return Integer.parseInt(x.substring(2), 16);
}

/**
 * Strips the mandatory "0x" prefix and returns the bare hex digits.
 * @throws Exception when the "0x" prefix is missing
 */
public String JSonHexToHex(String x) throws Exception {
    if (!x.startsWith("0x"))
        throw new Exception("Incorrect hex syntax");
    return x.substring(2);
}
/** Resolves a "0x"-prefixed block hash to a Block (null when unknown). */
public Block getBlockByJSonHash(String blockHash) throws Exception {
    return worldManager.getBlockchain().getBlockByHash(TypeConverter.StringHexToByteArray(blockHash));
}

/**
 * Resolves a JSON-RPC block id ("earliest", "latest", "pending", or a hex
 * block number) to a Block. "pending" deliberately maps to null; an unknown
 * block number also yields null.
 */
private Block getByJsonBlockId(String id) {
    if ("pending".equalsIgnoreCase(id)) {
        return null;
    }
    if ("earliest".equalsIgnoreCase(id)) {
        return blockchain.getBlockByNumber(0);
    }
    if ("latest".equalsIgnoreCase(id)) {
        return blockchain.getBestBlock();
    }
    long blockNumber = StringHexToBigInteger(id).longValue();
    return blockchain.getBlockByNumber(blockNumber);
}
/**
 * Returns the repository (state) snapshot for the given JSON-RPC block id:
 * the pending-state repository for "pending", otherwise a snapshot at the
 * resolved block's state root.
 */
private Repository getRepoByJsonBlockId(String id) {
if ("pending".equalsIgnoreCase(id)) {
return pendingState.getRepository();
} else {
Block block = getByJsonBlockId(id);
// NOTE(review): block may be null for an unknown block number, which would
// NPE on getStateRoot() below — confirm callers only pass valid ids.
return this.repository.getSnapshotTo(block.getStateRoot());
}
}
/**
 * Returns the transactions for the given JSON-RPC block id: all pending
 * transactions for "pending", otherwise the resolved block's transaction
 * list (null when the block is unknown).
 */
private List<Transaction> getTransactionsByJsonBlockId(String id) {
if ("pending".equalsIgnoreCase(id)) {
return pendingState.getAllPendingTransactions();
} else {
Block block = getByJsonBlockId(id);
return block != null ? block.getTransactionsList() : null;
}
}
/** Looks up a locally managed account by "0x"-hex address; null when absent. */
protected Account getAccount(String address) throws Exception {
    ByteArrayWrapper key = new ByteArrayWrapper(StringHexToByteArray(address));
    return accounts.get(key);
}

/** Creates and registers an account whose private key is sha3(seed). */
protected Account addAccount(String seed) {
    ECKey derived = ECKey.fromPrivate(SHA3Helper.sha3(seed.getBytes()));
    return addAccount(derived);
}

/** Registers the account for the given key, indexed by its address bytes. */
protected Account addAccount(ECKey key) {
    Account created = new Account();
    created.init(key);
    accounts.put(new ByteArrayWrapper(created.getAddress()), created);
    return created;
}
/**
 * web3_clientVersion JSON-RPC call: returns the client banner string
 * ("EthereumJ/v&lt;version&gt;/&lt;os&gt;/Java1.7/&lt;modifier&gt;").
 * NOTE(review): the "Java1.7" segment is hard-coded and does not reflect the
 * running JVM — confirm whether it should use System.getProperty("java.version").
 */
public String web3_clientVersion() {
    String s = "EthereumJ" + "/v" + SystemProperties.CONFIG.projectVersion() + "/" +
            System.getProperty("os.name") + "/Java1.7/" + SystemProperties.CONFIG.projectVersionModifier();
    if (logger.isDebugEnabled()) logger.debug("web3_clientVersion(): " + s);
    return s;
}
/** web3_sha3 JSON-RPC call: sha3 (keccak-256) of the given hex data, as 0x-hex. */
public String web3_sha3(String data) throws Exception {
String s = null;
// Pattern used throughout this class: assign the result to s inside the try
// so the finally block can debug-log the returned value alongside the call.
try {
byte[] result = HashUtil.sha3(TypeConverter.StringHexToByteArray(data));
return s = TypeConverter.toJsonHex(result);
} finally {
if (logger.isDebugEnabled()) logger.debug("web3_sha3(" + data + "): " + s);
}
}
/** net_version JSON-RPC call: reported here as the eth protocol version. */
public String net_version() {
String s = null;
try {
return s = eth_protocolVersion();
} finally {
if (logger.isDebugEnabled()) logger.debug("net_version(): " + s);
}
}
/** net_peerCount JSON-RPC call: number of currently active peers, as 0x-hex. */
public String net_peerCount(){
String s = null;
try {
int n = channelManager.getActivePeers().size();
return s = TypeConverter.toJsonHex(n);
} finally {
if (logger.isDebugEnabled()) logger.debug("net_peerCount(): " + s);
}
}
/** net_listening JSON-RPC call: whether the peer server accepts connections. */
public boolean net_listening() {
Boolean s = null;
try {
return s = peerServer.isListening();
}finally {
if (logger.isDebugEnabled()) logger.debug("net_listening(): " + s);
}
}
/** eth_protocolVersion JSON-RPC call: highest configured "eth" capability version. */
public String eth_protocolVersion(){
String s = null;
try {
int version = 0;
for (Capability capability : configCapabilities.getConfigCapabilities()) {
if (capability.isEth()) {
version = max(version, capability.getVersion());
}
}
return s = Integer.toString(version);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_protocolVersion(): " + s);
}
}
/**
 * eth_syncing JSON-RPC call: reports the starting, current, and highest known
 * block numbers as 0x-hex strings.
 * NOTE(review): the JSON-RPC spec returns false when not syncing; this
 * implementation always returns the result object — confirm intent.
 */
public SyncingResult eth_syncing() {
    SyncingResult s = new SyncingResult();
    try {
        s.startingBlock = TypeConverter.toJsonHex(initialBlockNumber);
        s.currentBlock = TypeConverter.toJsonHex(blockchain.getBestBlock().getNumber());
        s.highestBlock = TypeConverter.toJsonHex(maxBlockNumberSeen);
        return s;
    } finally {
        if (logger.isDebugEnabled()) logger.debug("eth_syncing(): " + s);
    }
}
/** eth_coinbase JSON-RPC call: the configured miner coinbase address, as 0x-hex. */
public String eth_coinbase() {
String s = null;
try {
return s = toJsonHex(blockchain.getMinerCoinbase());
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_coinbase(): " + s);
}
}
/** eth_mining JSON-RPC call: whether the local block miner is running. */
public boolean eth_mining() {
Boolean s = null;
try {
return s = blockMiner.isMining();
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_mining(): " + s);
}
}
/** eth_hashrate JSON-RPC call. Not implemented: always returns null. */
public String eth_hashrate() {
String s = null;
try {
return s = null;
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_hashrate(): " + s);
}
}
/** eth_gasPrice JSON-RPC call: current gas price estimate, as 0x-hex. */
public String eth_gasPrice(){
String s = null;
try {
return s = TypeConverter.toJsonHex(eth.getGasPrice());
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_gasPrice(): " + s);
}
}
/** eth_accounts JSON-RPC call: addresses of locally managed accounts. */
public String[] eth_accounts() {
String[] s = null;
try {
return s = personal_listAccounts();
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_accounts(): " + Arrays.toString(s));
}
}
/** eth_blockNumber JSON-RPC call: number of the best known block, as 0x-hex (0x0 when no block). */
public String eth_blockNumber(){
String s = null;
try {
Block bestBlock = blockchain.getBestBlock();
long b = 0;
if (bestBlock != null) {
b = bestBlock.getNumber();
}
return s = TypeConverter.toJsonHex(b);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_blockNumber(): " + s);
}
}
/**
 * eth_getBalance JSON-RPC call: balance of the address at the given block id
 * ("earliest"/"latest"/"pending"/hex number), as 0x-hex.
 */
public String eth_getBalance(String address, String blockId) throws Exception {
String s = null;
try {
byte[] addressAsByteArray = TypeConverter.StringHexToByteArray(address);
BigInteger balance = getRepoByJsonBlockId(blockId).getBalance(addressAsByteArray);
return s = TypeConverter.toJsonHex(balance);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getBalance(" + address + ", " + blockId + "): " + s);
}
}
/** Convenience overload of eth_getBalance defaulting to the "latest" block. */
public String eth_getBalance(String address) throws Exception {
String s = null;
try {
return s = eth_getBalance(address, "latest");
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getBalance(" + address + "): " + s);
}
}
/** eth_getStorageAt JSON-RPC call: 32-byte storage word at the given slot, as 0x-hex. */
@Override
public String eth_getStorageAt(String address, String storageIdx, String blockId) throws Exception {
String s = null;
try {
byte[] addressAsByteArray = StringHexToByteArray(address);
DataWord storageValue = getRepoByJsonBlockId(blockId).
getStorageValue(addressAsByteArray, new DataWord(StringHexToByteArray(storageIdx)));
return s = TypeConverter.toJsonHex(storageValue.getData());
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getStorageAt(" + address + ", " + storageIdx + ", " + blockId + "): " + s);
}
}
/** eth_getTransactionCount JSON-RPC call: the account nonce at the given block id, as 0x-hex. */
@Override
public String eth_getTransactionCount(String address, String blockId) throws Exception {
String s = null;
try {
byte[] addressAsByteArray = TypeConverter.StringHexToByteArray(address);
BigInteger nonce = getRepoByJsonBlockId(blockId).getNonce(addressAsByteArray);
return s = TypeConverter.toJsonHex(nonce);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getTransactionCount(" + address + ", " + blockId + "): " + s);
}
}
/** eth_getBlockTransactionCountByHash JSON-RPC call; null when the block is unknown. */
public String eth_getBlockTransactionCountByHash(String blockHash) throws Exception {
String s = null;
try {
Block b = getBlockByJSonHash(blockHash);
if (b == null) return null;
long n = b.getTransactionsList().size();
return s = TypeConverter.toJsonHex(n);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getBlockTransactionCountByHash(" + blockHash + "): " + s);
}
}
/** eth_getBlockTransactionCountByNumber JSON-RPC call; null when the block is unknown. */
public String eth_getBlockTransactionCountByNumber(String bnOrId) throws Exception {
String s = null;
try {
List<Transaction> list = getTransactionsByJsonBlockId(bnOrId);
if (list == null) return null;
long n = list.size();
return s = TypeConverter.toJsonHex(n);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getBlockTransactionCountByNumber(" + bnOrId + "): " + s);
}
}
/** eth_getUncleCountByBlockHash JSON-RPC call; null when the block is unknown. */
public String eth_getUncleCountByBlockHash(String blockHash) throws Exception {
String s = null;
try {
Block b = getBlockByJSonHash(blockHash);
if (b == null) return null;
long n = b.getUncleList().size();
return s = TypeConverter.toJsonHex(n);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getUncleCountByBlockHash(" + blockHash + "): " + s);
}
}
/** eth_getUncleCountByBlockNumber JSON-RPC call; null when the block is unknown. */
public String eth_getUncleCountByBlockNumber(String bnOrId) throws Exception {
String s = null;
try {
Block b = getByJsonBlockId(bnOrId);
if (b == null) return null;
long n = b.getUncleList().size();
return s = TypeConverter.toJsonHex(n);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getUncleCountByBlockNumber(" + bnOrId + "): " + s);
}
}
/** eth_getCode JSON-RPC call: contract code at the address for the given block id, as 0x-hex. */
public String eth_getCode(String address, String blockId) throws Exception {
String s = null;
try {
byte[] addressAsByteArray = TypeConverter.StringHexToByteArray(address);
byte[] code = getRepoByJsonBlockId(blockId).getCode(addressAsByteArray);
return s = TypeConverter.toJsonHex(code);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getCode(" + address + ", " + blockId + "): " + s);
}
}
/**
 * eth_sign JSON-RPC call: signs sha3(data) with the locally managed account's key.
 * @throws Exception when the address is not a locally managed account
 */
public String eth_sign(String addr,String data) throws Exception {
String s = null;
try {
String ha = JSonHexToHex(addr);
Account account = getAccount(ha);
if (account==null)
throw new Exception("Inexistent account");
// Todo: is not clear from the spec what hash function must be used to sign
byte[] masgHash= HashUtil.sha3(TypeConverter.StringHexToByteArray(data));
ECKey.ECDSASignature signature = account.getEcKey().sign(masgHash);
// Todo: is not clear if result should be RlpEncoded or serialized by other means
byte[] rlpSig = RLP.encode(signature);
return s = TypeConverter.toJsonHex(rlpSig);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_sign(" + addr + ", " + data + "): " + s);
}
}
/**
 * eth_sendTransaction JSON-RPC call: builds, signs (with the locally managed
 * "from" account) and submits a transaction; the nonce is taken from the
 * pending state. Returns the transaction hash as 0x-hex.
 * @throws Exception when the "from" account's key is not managed by this node
 */
public String eth_sendTransaction(CallArguments args) throws Exception {
String s = null;
try {
Account account = getAccount(JSonHexToHex(args.from));
if (account == null)
throw new Exception("From address private key could not be found in this node");
if (args.data != null && args.data.startsWith("0x"))
args.data = args.data.substring(2);
Transaction tx = new Transaction(
bigIntegerToBytes(pendingState.getRepository().getNonce(account.getAddress())),
args.gasPrice != null ? StringHexToByteArray(args.gasPrice) : EMPTY_BYTE_ARRAY,
args.gasLimit != null ? StringHexToByteArray(args.gasLimit) : EMPTY_BYTE_ARRAY,
args.to != null ? StringHexToByteArray(args.to) : EMPTY_BYTE_ARRAY,
args.value != null ? StringHexToByteArray(args.value) : EMPTY_BYTE_ARRAY,
args.data != null ? StringHexToByteArray(args.data) : EMPTY_BYTE_ARRAY);
tx.sign(account.getEcKey().getPrivKeyBytes());
eth.submitTransaction(tx);
return s = TypeConverter.toJsonHex(tx.getHash());
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_sendTransaction(" + args + "): " + s);
}
}
/**
 * Overload taking every transaction field explicitly as a 0x-hex string
 * (including the nonce, unlike the CallArguments variant).
 */
public String eth_sendTransaction(String from, String to, String gas,
String gasPrice, String value,String data,String nonce) throws Exception {
String s = null;
try {
Transaction tx = new Transaction(
TypeConverter.StringHexToByteArray(nonce),
TypeConverter.StringHexToByteArray(gasPrice),
TypeConverter.StringHexToByteArray(gas),
TypeConverter.StringHexToByteArray(to), /*receiveAddress*/
TypeConverter.StringHexToByteArray(value),
TypeConverter.StringHexToByteArray(data));
Account account = getAccount(from);
if (account == null) throw new RuntimeException("No account " + from);
tx.sign(account.getEcKey().getPrivKeyBytes());
eth.submitTransaction(tx);
return s = TypeConverter.toJsonHex(tx.getHash());
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_sendTransaction(" +
"from = [" + from + "], to = [" + to + "], gas = [" + gas + "], gasPrice = [" + gasPrice +
"], value = [" + value + "], data = [" + data + "], nonce = [" + nonce + "]" + "): " + s);
}
}
/** eth_sendRawTransaction JSON-RPC call: submits an already-signed, RLP-encoded transaction. */
public String eth_sendRawTransaction(String rawData) throws Exception {
String s = null;
try {
Transaction tx = new Transaction(StringHexToByteArray(rawData));
eth.submitTransaction(tx);
return s = TypeConverter.toJsonHex(tx.getHash());
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_sendRawTransaction(" + rawData + "): " + s);
}
}
/**
 * Builds a raw (unsigned, nonce 0) transaction from the call arguments and
 * executes it as a constant (state-unchanging) call against the given block.
 */
public ProgramResult createCallTxAndExecute(CallArguments args, Block block) throws Exception {
BinaryCallArguments bca = new BinaryCallArguments();
bca.setArguments(args);
Transaction tx = CallTransaction.createRawTransaction(0,
bca.gasPrice,
bca.gasLimit,
bca.toAddress,
bca.value,
bca.data);
ProgramResult res = eth.callConstant(tx, block);
return res;
}
/** eth_call JSON-RPC call: constant execution at the given block id; returns the output data as 0x-hex. */
public String eth_call(CallArguments args, String bnOrId) throws Exception {
String s = null;
try {
ProgramResult res = createCallTxAndExecute(args, getByJsonBlockId(bnOrId));
return s = TypeConverter.toJsonHex(res.getHReturn());
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_call(" + args + "): " + s);
}
}
/** eth_estimateGas JSON-RPC call: gas used by a constant execution against the best block, as 0x-hex. */
public String eth_estimateGas(CallArguments args) throws Exception {
String s = null;
try {
ProgramResult res = createCallTxAndExecute(args, blockchain.getBestBlock());
return s = TypeConverter.toJsonHex(res.getGasUsed());
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_estimateGas(" + args + "): " + s);
}
}
/**
 * Converts a Block into the JSON-RPC BlockResult DTO.
 * @param b the block to convert; null yields null
 * @param fullTx when true, transactions are embedded as full DTOs; otherwise only their hashes
 */
public BlockResult getBlockResult(Block b, boolean fullTx) {
if (b==null)
return null;
// A zero nonce marks a locally built "pending" block: per the JSON-RPC spec,
// fields that are unknown until mining (number, hash, miner, ...) are null.
boolean isPending = ByteUtil.byteArrayToLong(b.getNonce()) == 0;
BlockResult br = new BlockResult();
br.number = isPending ? null : TypeConverter.toJsonHex(b.getNumber());
br.hash = isPending ? null : TypeConverter.toJsonHex(b.getHash());
br.parentHash = TypeConverter.toJsonHex(b.getParentHash());
br.nonce = isPending ? null : TypeConverter.toJsonHex(b.getNonce());
br.sha3Uncles= TypeConverter.toJsonHex(b.getUnclesHash());
br.logsBloom = isPending ? null : TypeConverter.toJsonHex(b.getLogBloom());
br.transactionsRoot =TypeConverter.toJsonHex(b.getTxTrieRoot());
br.stateRoot = TypeConverter.toJsonHex(b.getStateRoot());
br.receiptsRoot =TypeConverter.toJsonHex(b.getReceiptsRoot());
br.miner = isPending ? null : TypeConverter.toJsonHex(b.getCoinbase());
br.difficulty = TypeConverter.toJsonHex(b.getDifficulty());
// NOTE(review): this reports the chain-head total difficulty, not the total
// difficulty at block b — confirm whether that is intended.
br.totalDifficulty = TypeConverter.toJsonHex(blockchain.getTotalDifficulty());
if (b.getExtraData() != null)
br.extraData =TypeConverter.toJsonHex(b.getExtraData());
br.size = TypeConverter.toJsonHex(b.getEncoded().length);
br.gasLimit =TypeConverter.toJsonHex(b.getGasLimit());
br.gasUsed =TypeConverter.toJsonHex(b.getGasUsed());
br.timestamp =TypeConverter.toJsonHex(b.getTimestamp());
List<Object> txes = new ArrayList<>();
if (fullTx) {
for (int i = 0; i < b.getTransactionsList().size(); i++) {
txes.add(new TransactionResultDTO(b, i, b.getTransactionsList().get(i)));
}
} else {
for (Transaction tx : b.getTransactionsList()) {
txes.add(toJsonHex(tx.getHash()));
}
}
br.transactions = txes.toArray();
List<String> ul = new ArrayList<>();
for (BlockHeader header : b.getUncleList()) {
ul.add(toJsonHex(header.getHash()));
}
br.uncles = ul.toArray(new String[ul.size()]);
return br;
}
/**
 * eth_getBlockByHash JSON-RPC call.
 * @param blockHash 0x-prefixed hash of the requested block
 * @param fullTransactionObjects when true, embed full transaction DTOs; otherwise only tx hashes
 * @return the block DTO, or null when the hash is unknown
 */
public BlockResult eth_getBlockByHash(String blockHash, Boolean fullTransactionObjects) throws Exception {
    BlockResult s = null;
    try {
        Block b = getBlockByJSonHash(blockHash);
        // Fix: capture the result in s so the finally-block debug log reports
        // the actual value (previously s was never assigned and always logged null).
        return s = getBlockResult(b, fullTransactionObjects);
    } finally {
        if (logger.isDebugEnabled()) logger.debug("eth_getBlockByHash(" + blockHash + ", " + fullTransactionObjects + "): " + s);
    }
}
/**
 * eth_getBlockByNumber JSON-RPC call. For "pending", a throw-away block is
 * assembled from the current pending transactions; otherwise the id is
 * resolved normally. Returns null for unknown block numbers.
 */
public BlockResult eth_getBlockByNumber(String bnOrId,Boolean fullTransactionObjects) throws Exception {
BlockResult s = null;
try {
Block b;
if ("pending".equalsIgnoreCase(bnOrId)) {
b = blockchain.createNewBlock(blockchain.getBestBlock(), pendingState.getAllPendingTransactions(), Collections.<BlockHeader>emptyList());
} else {
b = getByJsonBlockId(bnOrId);
}
return s = (b == null ? null : getBlockResult(b, fullTransactionObjects));
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getBlockByNumber(" + bnOrId + ", " + fullTransactionObjects + "): " + s);
}
}
/**
 * eth_getTransactionByHash JSON-RPC call. Returns null when the transaction
 * is unknown or its containing block is not on the main chain.
 */
public TransactionResultDTO eth_getTransactionByHash(String transactionHash) throws Exception {
TransactionResultDTO s = null;
try {
TransactionInfo txInfo = transactionStore.get(StringHexToByteArray(transactionHash));
if (txInfo == null) {
return null;
}
Block block = blockchain.getBlockByHash(txInfo.getBlockHash());
// need to return txes only from main chain
Block mainBlock = blockchain.getBlockByNumber(block.getNumber());
if (!Arrays.equals(block.getHash(), mainBlock.getHash())) {
return null;
}
return s = new TransactionResultDTO(block, txInfo.getIndex(), block.getTransactionsList().get(txInfo.getIndex()));
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getTransactionByHash(" + transactionHash + "): " + s);
}
}
/** eth_getTransactionByBlockHashAndIndex JSON-RPC call; null when block or index is out of range. */
public TransactionResultDTO eth_getTransactionByBlockHashAndIndex(String blockHash,String index) throws Exception {
TransactionResultDTO s = null;
try {
Block b = getBlockByJSonHash(blockHash);
if (b == null) return null;
int idx = JSonHexToInt(index);
if (idx >= b.getTransactionsList().size()) return null;
Transaction tx = b.getTransactionsList().get(idx);
return s = new TransactionResultDTO(b, idx, tx);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getTransactionByBlockHashAndIndex(" + blockHash + ", " + index + "): " + s);
}
}
/**
 * eth_getTransactionByBlockNumberAndIndex JSON-RPC call; null when the block
 * or index is out of range.
 * NOTE(review): for "pending", b resolves to null while txs may be non-null,
 * so a DTO may be built with a null block — confirm TransactionResultDTO
 * tolerates that.
 */
public TransactionResultDTO eth_getTransactionByBlockNumberAndIndex(String bnOrId, String index) throws Exception {
TransactionResultDTO s = null;
try {
Block b = getByJsonBlockId(bnOrId);
List<Transaction> txs = getTransactionsByJsonBlockId(bnOrId);
if (txs == null) return null;
int idx = JSonHexToInt(index);
if (idx >= txs.size()) return null;
Transaction tx = txs.get(idx);
return s = new TransactionResultDTO(b, idx, tx);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getTransactionByBlockNumberAndIndex(" + bnOrId + ", " + index + "): " + s);
}
}
/** eth_getTransactionReceipt JSON-RPC call; null when unknown or not on the main chain. */
public TransactionReceiptDTO eth_getTransactionReceipt(String transactionHash) throws Exception {
TransactionReceiptDTO s = null;
try {
byte[] hash = TypeConverter.StringHexToByteArray(transactionHash);
TransactionInfo txInfo = txStore.get(hash);
if (txInfo == null)
return null;
Block block = blockchain.getBlockByHash(txInfo.getBlockHash());
// need to return txes only from main chain
Block mainBlock = blockchain.getBlockByNumber(block.getNumber());
if (!Arrays.equals(block.getHash(), mainBlock.getHash())) {
return null;
}
return s = new TransactionReceiptDTO(block, txInfo);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getTransactionReceipt(" + transactionHash + "): " + s);
}
}
/**
 * eth_getUncleByBlockHashAndIndex JSON-RPC call. If the uncle's own block is
 * not stored, a synthetic empty block is built around the uncle header.
 */
@Override
public BlockResult eth_getUncleByBlockHashAndIndex(String blockHash, String uncleIdx) throws Exception {
BlockResult s = null;
try {
Block block = blockchain.getBlockByHash(StringHexToByteArray(blockHash));
if (block == null) return null;
int idx = JSonHexToInt(uncleIdx);
if (idx >= block.getUncleList().size()) return null;
BlockHeader uncleHeader = block.getUncleList().get(idx);
Block uncle = blockchain.getBlockByHash(uncleHeader.getHash());
if (uncle == null) {
uncle = new Block(uncleHeader, Collections.<Transaction>emptyList(), Collections.<BlockHeader>emptyList());
}
return s = getBlockResult(uncle, false);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getUncleByBlockHashAndIndex(" + blockHash + ", " + uncleIdx + "): " + s);
}
}
/** eth_getUncleByBlockNumberAndIndex JSON-RPC call: delegates via the resolved block's hash. */
@Override
public BlockResult eth_getUncleByBlockNumberAndIndex(String blockId, String uncleIdx) throws Exception {
BlockResult s = null;
try {
Block block = getByJsonBlockId(blockId);
return s = block == null ? null :
eth_getUncleByBlockHashAndIndex(toJsonHex(block.getHash()), uncleIdx);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_getUncleByBlockNumberAndIndex(" + blockId + ", " + uncleIdx + "): " + s);
}
}
/** eth_getCompilers JSON-RPC call: only the Solidity compiler is available. */
@Override
public String[] eth_getCompilers() {
    String[] compilers = null;
    try {
        compilers = new String[] {"solidity"};
        return compilers;
    } finally {
        if (logger.isDebugEnabled()) logger.debug("eth_getCompilers(): " + Arrays.toString(compilers));
    }
}
/** eth_compileLLL JSON-RPC call. Unsupported: only Solidity is available. */
@Override
public CompilationResult eth_compileLLL(String contract) {
throw new UnsupportedOperationException("LLL compiler not supported");
}
/**
 * eth_compileSolidity JSON-RPC call: compiles the source and returns its
 * bytecode and ABI metadata for the first contract in the compiler output.
 * @throws RuntimeException when the compiler reports errors
 */
@Override
public CompilationResult eth_compileSolidity(String contract) throws Exception {
CompilationResult s = null;
try {
SolidityCompiler.Result res = SolidityCompiler.compile(
contract.getBytes(), true, SolidityCompiler.Options.ABI, SolidityCompiler.Options.BIN);
if (!res.errors.isEmpty()) {
throw new RuntimeException("Compilation error: " + res.errors);
}
org.ethereum.solidity.compiler.CompilationResult result = org.ethereum.solidity.compiler.CompilationResult.parse(res.output);
CompilationResult ret = new CompilationResult();
// Only the first compiled contract is reported back to the caller.
org.ethereum.solidity.compiler.CompilationResult.ContractMetadata contractMetadata = result.contracts.values().iterator().next();
ret.code = toJsonHex(contractMetadata.bin);
ret.info = new CompilationInfo();
ret.info.source = contract;
ret.info.language = "Solidity";
ret.info.languageVersion = "0";
ret.info.compilerVersion = result.version;
ret.info.abiDefinition = new CallTransaction.Contract(contractMetadata.abi);
return s = ret;
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_compileSolidity(" + contract + ")" + s);
}
}
/** eth_compileSerpent JSON-RPC call. Unsupported: only Solidity is available. */
@Override
public CompilationResult eth_compileSerpent(String contract){
throw new UnsupportedOperationException("Serpent compiler not supported");
}
/** eth_resend JSON-RPC call. Not implemented yet. */
@Override
public String eth_resend() {
throw new UnsupportedOperationException("JSON RPC method eth_resend not implemented yet");
}
/** eth_pendingTransactions JSON-RPC call. Not implemented yet. */
@Override
public String eth_pendingTransactions() {
throw new UnsupportedOperationException("JSON RPC method eth_pendingTransactions not implemented yet");
}
/**
 * Base class for installed JSON-RPC filters (eth_newFilter family).
 * Accumulates events until the client drains them via eth_getFilterChanges.
 * Accessors are synchronized because events arrive from listener threads
 * while RPC threads poll.
 */
static class Filter {
static abstract class FilterEvent {
// JSON-serializable representation of this event (hash string or DTO).
public abstract Object getJsonEventObject();
}
List<FilterEvent> events = new ArrayList<>();
// True when events have accumulated since the last poll.
public synchronized boolean hasNew() { return !events.isEmpty();}
// Drains all pending events and returns their JSON representations.
public synchronized Object[] poll() {
Object[] ret = new Object[events.size()];
for (int i = 0; i < ret.length; i++) {
ret[i] = events.get(i).getJsonEventObject();
}
this.events.clear();
return ret;
}
protected synchronized void add(FilterEvent evt) {
events.add(evt);
}
// Subclasses override whichever callbacks they are interested in.
public void newBlockReceived(Block b) {}
public void newPendingTx(Transaction tx) {}
}
/** Filter for eth_newBlockFilter: records each new block as its hash (0x-hex). */
static class NewBlockFilter extends Filter {
class NewBlockFilterEvent extends FilterEvent {
public final Block b;
NewBlockFilterEvent(Block b) {this.b = b;}
@Override
public String getJsonEventObject() {
return toJsonHex(b.getHash());
}
}
public void newBlockReceived(Block b) {
add(new NewBlockFilterEvent(b));
}
}
/** Filter for eth_newPendingTransactionFilter: records each pending tx as its hash (0x-hex). */
static class PendingTransactionFilter extends Filter {
class PendingTransactionFilterEvent extends FilterEvent {
private final Transaction tx;
PendingTransactionFilterEvent(Transaction tx) {this.tx = tx;}
@Override
public String getJsonEventObject() {
return toJsonHex(tx.getHash());
}
}
public void newPendingTx(Transaction tx) {
add(new PendingTransactionFilterEvent(tx));
}
}
/**
 * Filter for eth_newFilter: matches transaction logs against a LogFilter
 * (address + topics) and records matching entries as LogFilterElement DTOs.
 * Non-static: needs the enclosing instance's transactionStore.
 */
class JsonLogFilter extends Filter {
class LogFilterEvent extends FilterEvent {
private final LogFilterElement el;
LogFilterEvent(LogFilterElement el) {
this.el = el;
}
@Override
public LogFilterElement getJsonEventObject() {
return el;
}
}
LogFilter logFilter;
// Set by eth_newFilter depending on fromBlock/toBlock ("latest" / "pending").
boolean onNewBlock;
boolean onPendingTx;
public JsonLogFilter(LogFilter logFilter) {
this.logFilter = logFilter;
}
void onLogMatch(LogInfo logInfo, Block b, int txIndex, Transaction tx, int logIdx) {
add(new LogFilterEvent(new LogFilterElement(logInfo, b, txIndex, tx, logIdx)));
}
// Cheap bloom pre-checks narrow candidates before exact topic matching.
void onTransactionReceipt(TransactionReceipt receipt, Block b, int txIndex) {
if (logFilter.matchBloom(receipt.getBloomFilter())) {
int logIdx = 0;
for (LogInfo logInfo : receipt.getLogInfoList()) {
if (logFilter.matchBloom(logInfo.getBloom()) && logFilter.matchesExactly(logInfo)) {
onLogMatch(logInfo, b, txIndex, receipt.getTransaction(), logIdx);
}
logIdx++;
}
}
}
void onTransaction(Transaction tx, Block b, int txIndex) {
if (logFilter.matchesContractAddress(tx.getReceiveAddress())) {
// NOTE(review): txInfo may be null if the tx is not yet in the store,
// which would NPE below — confirm onBlock is only fed stored blocks.
TransactionInfo txInfo = transactionStore.get(tx.getHash());
onTransactionReceipt(txInfo.getReceipt(), b, txIndex);
}
}
void onBlock(Block b) {
if (logFilter.matchBloom(new Bloom(b.getLogBloom()))) {
int txIdx = 0;
for (Transaction tx : b.getTransactionsList()) {
onTransaction(tx, b, txIdx);
txIdx++;
}
}
}
@Override
public void newBlockReceived(Block b) {
if (onNewBlock) onBlock(b);
}
@Override
public void newPendingTx(Transaction tx) {
// TODO add TransactionReceipt for PendingTx
// if (onPendingTx)
}
}
// Monotonic id source for installed filters; ids start at 1.
AtomicInteger filterCounter = new AtomicInteger(1);
// Installed filters by id; Hashtable provides coarse thread safety for
// concurrent access from RPC threads and blockchain listener callbacks.
Map<Integer, Filter> installedFilters = new Hashtable<>();
/**
 * eth_newFilter JSON-RPC call: installs a log filter built from the request's
 * address(es) and topics, replays historical blocks in [fromBlock, toBlock]
 * into it, and arms it for live "latest"/"pending" events. Returns the new
 * filter id as 0x-hex.
 */
@Override
public String eth_newFilter(FilterRequest fr) throws Exception {
String str = null;
try {
LogFilter logFilter = new LogFilter();
// address may be a single hex string or an array of hex strings.
if (fr.address instanceof String) {
logFilter.withContractAddress(StringHexToByteArray((String) fr.address));
} else if (fr.address instanceof String[]) {
List<byte[]> addr = new ArrayList<>();
for (String s : ((String[]) fr.address)) {
addr.add(StringHexToByteArray(s));
}
logFilter.withContractAddress(addr.toArray(new byte[0][]));
}
// Each topic position may be null (wildcard), a hex string, or an array
// of alternatives.
if (fr.topics != null) {
for (Object topic : fr.topics) {
if (topic == null) {
logFilter.withTopic(null);
} else if (topic instanceof String) {
logFilter.withTopic(new DataWord(StringHexToByteArray((String) topic)).getData());
} else if (topic instanceof String[]) {
List<byte[]> t = new ArrayList<>();
for (String s : ((String[]) topic)) {
t.add(new DataWord(StringHexToByteArray(s)).getData());
}
logFilter.withTopic(t.toArray(new byte[0][]));
}
}
}
JsonLogFilter filter = new JsonLogFilter(logFilter);
int id = filterCounter.getAndIncrement();
installedFilters.put(id, filter);
Block blockFrom = fr.fromBlock == null ? null : getByJsonBlockId(fr.fromBlock);
Block blockTo = fr.toBlock == null ? null : getByJsonBlockId(fr.toBlock);
if (blockFrom != null) {
// need to add historical data
blockTo = blockTo == null ? blockchain.getBestBlock() : blockTo;
for (long blockNum = blockFrom.getNumber(); blockNum <= blockTo.getNumber(); blockNum++) {
filter.onBlock(blockchain.getBlockByNumber(blockNum));
}
}
// the following is not precisely documented
if ("pending".equalsIgnoreCase(fr.fromBlock) || "pending".equalsIgnoreCase(fr.toBlock)) {
filter.onPendingTx = true;
} else if ("latest".equalsIgnoreCase(fr.fromBlock) || "latest".equalsIgnoreCase(fr.toBlock)) {
filter.onNewBlock = true;
}
return str = toJsonHex(id);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_newFilter(" + fr + "): " + str);
}
}
/** eth_newBlockFilter JSON-RPC call: installs a new-block filter; returns its id as 0x-hex. */
@Override
public String eth_newBlockFilter() {
String s = null;
try {
int id = filterCounter.getAndIncrement();
installedFilters.put(id, new NewBlockFilter());
return s = toJsonHex(id);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_newBlockFilter(): " + s);
}
}
/** eth_newPendingTransactionFilter JSON-RPC call: installs a pending-tx filter; returns its id as 0x-hex. */
@Override
public String eth_newPendingTransactionFilter() {
String s = null;
try {
int id = filterCounter.getAndIncrement();
installedFilters.put(id, new PendingTransactionFilter());
return s = toJsonHex(id);
} finally {
if (logger.isDebugEnabled()) logger.debug("eth_newPendingTransactionFilter(): " + s);
}
}
/**
 * Removes a previously installed filter.
 *
 * @param id hex-encoded filter id; {@code null} is tolerated
 * @return {@code true} only when a filter with that id was actually removed
 */
@Override
public boolean eth_uninstallFilter(String id) {
    Boolean removed = null;
    try {
        if (id != null) {
            final int filterId = StringHexToBigInteger(id).intValue();
            removed = installedFilters.remove(filterId) != null;
            return removed;
        }
        return false;
    } finally {
        if (logger.isDebugEnabled()) {
            logger.debug("eth_uninstallFilter(" + id + "): " + removed);
        }
    }
}
/**
 * Drains and returns the events accumulated by a filter since the last poll.
 *
 * @param id hex-encoded filter id
 * @return the pending events, or {@code null} when no such filter is installed
 */
@Override
public Object[] eth_getFilterChanges(String id) {
    Object[] polled = null;
    try {
        final int filterId = StringHexToBigInteger(id).intValue();
        final Filter filter = installedFilters.get(filterId);
        if (filter != null) {
            polled = filter.poll();
            return polled;
        }
        return null;
    } finally {
        if (logger.isDebugEnabled()) {
            logger.debug("eth_getFilterChanges(" + id + "): " + Arrays.toString(polled));
        }
    }
}
/**
 * Returns the logs matching a filter.
 * NOTE: this delegates to eth_getFilterChanges, so it returns (and consumes)
 * the filter's pending changes rather than re-running the full historical query.
 */
@Override
public Object[] eth_getFilterLogs(String id) {
logger.debug("eth_getFilterLogs ...");
return eth_getFilterChanges(id);
}
/**
 * Implements eth_getLogs by installing a temporary filter, draining it once
 * and uninstalling it again.
 *
 * Fix: the temporary filter is now uninstalled in a finally block, so it no
 * longer leaks an entry in installedFilters when eth_getFilterChanges throws.
 *
 * @param fr the log filter criteria (address/topics/block range)
 * @return the matching log events
 */
@Override
public Object[] eth_getLogs(FilterRequest fr) throws Exception {
    logger.debug("eth_getLogs ...");
    String id = eth_newFilter(fr);
    try {
        return eth_getFilterChanges(id);
    } finally {
        eth_uninstallFilter(id);
    }
}
// -------------------------------------------------------------------------
// Unimplemented JSON-RPC endpoints: eth_* mining work submission, the
// deprecated db_* key/value store and the shh_* (Whisper) API. Each stub
// fails fast with UnsupportedOperationException so callers get an explicit
// error instead of a silent no-op.
// -------------------------------------------------------------------------
@Override
public String eth_getWork() {
throw new UnsupportedOperationException("JSON RPC method eth_getWork not implemented yet");
}
@Override
public String eth_submitWork() {
throw new UnsupportedOperationException("JSON RPC method eth_submitWork not implemented yet");
}
@Override
public String eth_submitHashrate() {
throw new UnsupportedOperationException("JSON RPC method eth_submitHashrate not implemented yet");
}
@Override
public String db_putString() {
throw new UnsupportedOperationException("JSON RPC method db_putString not implemented yet");
}
@Override
public String db_getString() {
throw new UnsupportedOperationException("JSON RPC method db_getString not implemented yet");
}
@Override
public String db_putHex() {
throw new UnsupportedOperationException("JSON RPC method db_putHex not implemented yet");
}
@Override
public String db_getHex() {
throw new UnsupportedOperationException("JSON RPC method db_getHex not implemented yet");
}
@Override
public String shh_post() {
throw new UnsupportedOperationException("JSON RPC method shh_post not implemented yet");
}
@Override
public String shh_version() {
throw new UnsupportedOperationException("JSON RPC method shh_version not implemented yet");
}
@Override
public String shh_newIdentity() {
throw new UnsupportedOperationException("JSON RPC method shh_newIdentity not implemented yet");
}
@Override
public String shh_hasIdentity() {
throw new UnsupportedOperationException("JSON RPC method shh_hasIdentity not implemented yet");
}
@Override
public String shh_newGroup() {
throw new UnsupportedOperationException("JSON RPC method shh_newGroup not implemented yet");
}
@Override
public String shh_addToGroup() {
throw new UnsupportedOperationException("JSON RPC method shh_addToGroup not implemented yet");
}
@Override
public String shh_newFilter() {
throw new UnsupportedOperationException("JSON RPC method shh_newFilter not implemented yet");
}
@Override
public String shh_uninstallFilter() {
throw new UnsupportedOperationException("JSON RPC method shh_uninstallFilter not implemented yet");
}
@Override
public String shh_getFilterChanges() {
throw new UnsupportedOperationException("JSON RPC method shh_getFilterChanges not implemented yet");
}
@Override
public String shh_getMessages() {
throw new UnsupportedOperationException("JSON RPC method shh_getMessages not implemented yet");
}
/**
 * Asks the local node to connect to the peer described by the given node URI.
 *
 * @param s peer descriptor string understood by {@code Node}'s constructor
 * @return always {@code true}; the connection attempt itself is initiated here,
 *         not awaited
 */
@Override
public boolean admin_addPeer(String s) {
    final Node peer = new Node(s);
    eth.connect(peer);
    return true;
}
// -------------------------------------------------------------------------
// Unimplemented admin_* management endpoints and net_addPeer. Each stub
// fails fast with UnsupportedOperationException so callers get an explicit
// error instead of a silent no-op.
// -------------------------------------------------------------------------
@Override
public String admin_exportChain() {
throw new UnsupportedOperationException("JSON RPC method admin_exportChain not implemented yet");
}
@Override
public String admin_importChain() {
throw new UnsupportedOperationException("JSON RPC method admin_importChain not implemented yet");
}
@Override
public String admin_sleepBlocks() {
throw new UnsupportedOperationException("JSON RPC method admin_sleepBlocks not implemented yet");
}
@Override
public String admin_verbosity() {
throw new UnsupportedOperationException("JSON RPC method admin_verbosity not implemented yet");
}
@Override
public String admin_setSolc() {
throw new UnsupportedOperationException("JSON RPC method admin_setSolc not implemented yet");
}
@Override
public String admin_startRPC() {
throw new UnsupportedOperationException("JSON RPC method admin_startRPC not implemented yet");
}
@Override
public String admin_stopRPC() {
throw new UnsupportedOperationException("JSON RPC method admin_stopRPC not implemented yet");
}
@Override
public String admin_setGlobalRegistrar() {
throw new UnsupportedOperationException("JSON RPC method admin_setGlobalRegistrar not implemented yet");
}
@Override
public String admin_setHashReg() {
throw new UnsupportedOperationException("JSON RPC method admin_setHashReg not implemented yet");
}
@Override
public String admin_setUrlHint() {
throw new UnsupportedOperationException("JSON RPC method admin_setUrlHint not implemented yet");
}
@Override
public String admin_saveInfo() {
throw new UnsupportedOperationException("JSON RPC method admin_saveInfo not implemented yet");
}
@Override
public String admin_register() {
throw new UnsupportedOperationException("JSON RPC method admin_register not implemented yet");
}
@Override
public String admin_registerUrl() {
throw new UnsupportedOperationException("JSON RPC method admin_registerUrl not implemented yet");
}
@Override
public String admin_startNatSpec() {
throw new UnsupportedOperationException("JSON RPC method admin_startNatSpec not implemented yet");
}
@Override
public String admin_stopNatSpec() {
throw new UnsupportedOperationException("JSON RPC method admin_stopNatSpec not implemented yet");
}
@Override
public String admin_getContractInfo() {
throw new UnsupportedOperationException("JSON RPC method admin_getContractInfo not implemented yet");
}
@Override
public String admin_httpGet() {
throw new UnsupportedOperationException("JSON RPC method admin_httpGet not implemented yet");
}
@Override
public String admin_nodeInfo() {
throw new UnsupportedOperationException("JSON RPC method admin_nodeInfo not implemented yet");
}
@Override
public String admin_peers() {
throw new UnsupportedOperationException("JSON RPC method admin_peers not implemented yet");
}
@Override
public String admin_datadir() {
throw new UnsupportedOperationException("JSON RPC method admin_datadir not implemented yet");
}
@Override
public String net_addPeer() {
throw new UnsupportedOperationException("JSON RPC method net_addPeer not implemented yet");
}
// Starts CPU mining via the embedded block miner; always reports success.
@Override
public boolean miner_start() {
blockMiner.startMining();
return true;
}
// Stops CPU mining; always reports success.
@Override
public boolean miner_stop() {
blockMiner.stopMining();
return true;
}
// Sets the coinbase (mining reward) address; expects a hex-encoded address.
@Override
public boolean miner_setEtherbase(String coinBase) throws Exception {
blockchain.setMinerCoinbase(TypeConverter.StringHexToByteArray(coinBase));
return true;
}
// Sets the extra-data bytes included in mined blocks; expects hex-encoded data.
@Override
public boolean miner_setExtra(String data) throws Exception {
blockchain.setMinerExtraData(TypeConverter.StringHexToByteArray(data));
return true;
}
// Sets the minimum gas price a transaction must offer to be included in mined
// blocks; expects a hex-encoded integer.
@Override
public boolean miner_setGasPrice(String newMinGasPrice) {
blockMiner.setMinGasPrice(TypeConverter.StringHexToBigInteger(newMinGasPrice));
return true;
}
// Ethash DAG management is not supported by this implementation; the three
// DAG endpoints below report failure unconditionally.
@Override
public boolean miner_startAutoDAG() {
return false;
}
@Override
public boolean miner_stopAutoDAG() {
return false;
}
@Override
public boolean miner_makeDAG() {
return false;
}
// Hard-coded placeholder hashrate; no real hashrate accounting is performed.
@Override
public String miner_hashrate() {
return "0x01";
}
// -------------------------------------------------------------------------
// Unimplemented debug_* endpoints. Each stub fails fast with
// UnsupportedOperationException so callers get an explicit error.
// -------------------------------------------------------------------------
@Override
public String debug_printBlock() {
throw new UnsupportedOperationException("JSON RPC method debug_printBlock not implemented yet");
}
@Override
public String debug_getBlockRlp() {
throw new UnsupportedOperationException("JSON RPC method debug_getBlockRlp not implemented yet");
}
@Override
public String debug_setHead() {
throw new UnsupportedOperationException("JSON RPC method debug_setHead not implemented yet");
}
@Override
public String debug_processBlock() {
throw new UnsupportedOperationException("JSON RPC method debug_processBlock not implemented yet");
}
@Override
public String debug_seedHash() {
throw new UnsupportedOperationException("JSON RPC method debug_seedHash not implemented yet");
}
@Override
public String debug_dumpBlock() {
throw new UnsupportedOperationException("JSON RPC method debug_dumpBlock not implemented yet");
}
@Override
public String debug_metrics() {
throw new UnsupportedOperationException("JSON RPC method debug_metrics not implemented yet");
}
/**
 * Creates a new account derived from the given seed and returns its address
 * as a hex-encoded JSON string. The seed is deliberately masked (*****) in
 * the debug log so it never reaches log files.
 */
@Override
public String personal_newAccount(String seed) {
String s = null;
try {
Account account = addAccount(seed);
return s = toJsonHex(account.getAddress());
} finally {
if (logger.isDebugEnabled()) logger.debug("personal_newAccount(*****): " + s);
}
}
/**
 * Pretends to unlock the given account. This implementation keeps account
 * keys available in memory, so there is nothing to unlock and the request
 * always succeeds. The password is masked (***) in the debug log.
 *
 * Fix: the previous version logged a dead, always-null local as the result
 * even though the method returns {@code true}; the actual outcome is logged
 * now and the dead variable is gone.
 *
 * @param addr     account address (hex); unused
 * @param pass     password; unused, never logged
 * @param duration requested unlock duration; unused
 * @return always {@code true}
 */
@Override
public boolean personal_unlockAccount(String addr, String pass, String duration) {
    final boolean result = true;
    if (logger.isDebugEnabled()) {
        logger.debug("personal_unlockAccount(" + addr + ", ***, " + duration + "): " + result);
    }
    return result;
}
/**
 * Lists the addresses of all accounts known to this node, hex-encoded.
 * NOTE(review): the result array is sized before iterating accounts.keySet();
 * this assumes accounts is not modified concurrently while this runs - confirm.
 */
@Override
public String[] personal_listAccounts() {
String[] ret = new String[accounts.size()];
try {
int i = 0;
for (ByteArrayWrapper addr : accounts.keySet()) {
ret[i++] = toJsonHex(addr.getData());
}
return ret;
} finally {
// Logs the (possibly partially filled) array even on an exceptional exit.
if (logger.isDebugEnabled()) logger.debug("personal_listAccounts(): " + Arrays.toString(ret));
}
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.hamcrest.junit.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import com.facebook.buck.event.BuckEventBusFactory;
import com.facebook.buck.jvm.core.JavaPackageFinder;
import com.facebook.buck.jvm.java.JavaLibraryBuilder;
import com.facebook.buck.jvm.java.JavaLibraryDescription;
import com.facebook.buck.jvm.java.JavacOptions;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.ActionGraph;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.FakeExportDependenciesRule;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.rules.query.Query;
import com.facebook.buck.testutil.TargetGraphFactory;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Optional;
/**
 * Tests for AndroidLibraryDescription: dependency propagation (exported deps,
 * provided deps, deps queries), bootclasspath handling and classpath filtering.
 * The EasyMock-based test relies on strict expect/replay/verify ordering, so
 * statement order inside each test is significant.
 */
public class AndroidLibraryDescriptionTest {
@Test
public void rulesExportedFromDepsBecomeFirstOrderDeps() throws Exception {
// Chain: rule -> exporting_rule -(exports)-> exported_rule -(exports)-> transitive_exported_rule
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver = new SourcePathResolver(new SourcePathRuleFinder(resolver));
FakeBuildRule transitiveExportedRule =
resolver.addToIndex(new FakeBuildRule("//:transitive_exported_rule", pathResolver));
FakeExportDependenciesRule exportedRule =
resolver.addToIndex(
new FakeExportDependenciesRule(
"//:exported_rule",
pathResolver,
transitiveExportedRule));
FakeExportDependenciesRule exportingRule =
resolver.addToIndex(
new FakeExportDependenciesRule("//:exporting_rule", pathResolver, exportedRule));
BuildTarget target = BuildTargetFactory.newInstance("//:rule");
BuildRule javaLibrary = AndroidLibraryBuilder.createBuilder(target)
.addDep(exportingRule.getBuildTarget())
.build(resolver);
// Both levels of exported deps must surface as first-order build deps.
assertThat(
javaLibrary.getBuildDeps(),
Matchers.allOf(
Matchers.hasItem(exportedRule),
Matchers.hasItem(transitiveExportedRule)));
}
@Test
public void rulesMatchingDepQueryBecomeFirstOrderDeps() throws Exception {
// Set up target graph: rule -> lib -> sublib -> bottom
TargetNode<JavaLibraryDescription.Arg, JavaLibraryDescription> bottomNode =
JavaLibraryBuilder.createBuilder(
BuildTargetFactory.newInstance("//:bottom"))
.build();
TargetNode<JavaLibraryDescription.Arg, JavaLibraryDescription> sublibNode =
JavaLibraryBuilder.createBuilder(
BuildTargetFactory.newInstance("//:sublib"))
.addDep(bottomNode.getBuildTarget())
.build();
TargetNode<JavaLibraryDescription.Arg, JavaLibraryDescription> libNode =
JavaLibraryBuilder.createBuilder(
BuildTargetFactory.newInstance("//:lib"))
.addDep(sublibNode.getBuildTarget())
.build();
TargetGraph targetGraph = TargetGraphFactory.newInstance(
bottomNode,
libNode,
sublibNode);
BuildRuleResolver resolver =
new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver = new SourcePathResolver(new SourcePathRuleFinder(resolver));
FakeBuildRule bottomRule = resolver.addToIndex(
new FakeBuildRule(bottomNode.getBuildTarget(), pathResolver));
FakeBuildRule sublibRule = resolver.addToIndex(
new FakeBuildRule(
sublibNode.getBuildTarget(),
pathResolver,
ImmutableSortedSet.of(bottomRule)));
FakeBuildRule libRule = resolver.addToIndex(
new FakeBuildRule(
libNode.getBuildTarget(),
pathResolver,
ImmutableSortedSet.of(sublibRule)));
BuildTarget target = BuildTargetFactory.newInstance("//:rule");
// The deps query keeps only targets whose name matches ".*lib".
BuildRule javaLibrary = AndroidLibraryBuilder.createBuilder(target)
.addDep(libNode.getBuildTarget())
.setDepsQuery(Query.of("filter('.*lib', deps($declared_deps))"))
.build(resolver, targetGraph);
assertThat(
javaLibrary.getBuildDeps(),
Matchers.hasItems(libRule, sublibRule));
// The bottom rule should be filtered since it does not match the regex
assertThat(
javaLibrary.getBuildDeps(),
Matchers.not(Matchers.hasItem(bottomRule)));
}
@Test
public void rulesExportedFromProvidedDepsBecomeFirstOrderDeps() throws Exception {
// Same chain as above, but reached through a provided dep instead of a dep.
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver = new SourcePathResolver(new SourcePathRuleFinder(resolver));
FakeBuildRule transitiveExportedRule =
resolver.addToIndex(new FakeBuildRule("//:transitive_exported_rule", pathResolver));
FakeExportDependenciesRule exportedRule =
resolver.addToIndex(
new FakeExportDependenciesRule(
"//:exported_rule",
pathResolver,
transitiveExportedRule));
FakeExportDependenciesRule exportingRule =
resolver.addToIndex(
new FakeExportDependenciesRule("//:exporting_rule", pathResolver, exportedRule));
BuildTarget target = BuildTargetFactory.newInstance("//:rule");
BuildRule javaLibrary = AndroidLibraryBuilder.createBuilder(target)
.addProvidedDep(exportingRule.getBuildTarget())
.build(resolver);
assertThat(
javaLibrary.getBuildDeps(),
Matchers.allOf(
Matchers.hasItem(exportedRule),
Matchers.hasItem(transitiveExportedRule)));
}
@Test
public void bootClasspathAppenderAddsLibsFromAndroidPlatformTarget() {
BuildContext.Builder builder = BuildContext.builder();
// Set to non-null values.
builder.setActionGraph(createMock(ActionGraph.class));
builder.setSourcePathResolver(createMock(SourcePathResolver.class));
builder.setJavaPackageFinder(createMock(JavaPackageFinder.class));
builder.setEventBus(BuckEventBusFactory.newInstance());
AndroidPlatformTarget androidPlatformTarget = createMock(AndroidPlatformTarget.class);
List<Path> entries = ImmutableList.of(
Paths.get("add-ons/addon-google_apis-google-15/libs/effects.jar"),
Paths.get("add-ons/addon-google_apis-google-15/libs/maps.jar"),
Paths.get("add-ons/addon-google_apis-google-15/libs/usb.jar"));
expect(androidPlatformTarget.getBootclasspathEntries()).andReturn(entries);
replay(androidPlatformTarget);
builder.setAndroidPlatformTargetSupplier(Suppliers.ofInstance(androidPlatformTarget));
BootClasspathAppender appender = new BootClasspathAppender();
JavacOptions options = JavacOptions.builder()
.setSourceLevel("1.7")
.setTargetLevel("1.7")
.build();
JavacOptions updated = appender.amend(options, builder.build());
// Expected bootclasspath joins the entries with the platform path separator.
assertEquals(
Optional.of(
("add-ons/addon-google_apis-google-15/libs/effects.jar" + File.pathSeparatorChar +
"add-ons/addon-google_apis-google-15/libs/maps.jar" + File.pathSeparatorChar +
"add-ons/addon-google_apis-google-15/libs/usb.jar").replace("/", File.separator)),
updated.getBootclasspath());
verify(androidPlatformTarget);
}
@Test
public void testClasspathContainsOnlyJavaTargets() throws Exception {
// An android_resource dep must not contribute to the compile-time classpath.
TargetNode<AndroidResourceDescription.Arg, AndroidResourceDescription> resourceRule =
AndroidResourceBuilder.createBuilder(
BuildTargetFactory.newInstance("//:res"))
.build();
TargetGraph targetGraph = TargetGraphFactory.newInstance(resourceRule);
BuildRuleResolver resolver =
new BuildRuleResolver(targetGraph, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver = new SourcePathResolver(new SourcePathRuleFinder(resolver));
resolver.addToIndex(new FakeBuildRule(resourceRule.getBuildTarget(), pathResolver));
AndroidLibrary androidLibrary = AndroidLibraryBuilder.createBuilder(
BuildTargetFactory.newInstance("//:android_lib"))
.addDep(resourceRule.getBuildTarget())
.build(resolver, targetGraph);
assertThat(
androidLibrary.getCompileTimeClasspathSourcePaths(),
Matchers.empty());
}
}
| |
package com.orange.util.algorithm.path.astar;
import com.orange.util.adt.list.ShiftList;
import com.orange.util.adt.map.LongSparseArray;
import com.orange.util.adt.queue.IQueue;
import com.orange.util.adt.queue.SortedQueue;
import com.orange.util.adt.spatial.bounds.util.IntBoundsUtils;
import com.orange.util.algorithm.path.ICostFunction;
import com.orange.util.algorithm.path.IPathFinder;
import com.orange.util.algorithm.path.IPathFinderMap;
import com.orange.util.algorithm.path.Path;
/**
* TODO Nodes could be recycle in a pool.
*
* (c) OrangeGame 2012
*
*
* @author OrangeGame <OGEngine@orangegame.cn>
*/
/**
 * A* path finder over an int-coordinate grid.
 *
 * <p>Bug fixes relative to the previous revision:
 * <ul>
 * <li>{@code Node.calculateID} no longer sign-extends a negative Y coordinate
 * over the X half of the packed id.</li>
 * <li>Path cost (g) is now accumulated from the start node; previously a
 * node's cost was only its last step cost, which breaks both A*'s queue
 * ordering and the {@code pMaxCost} check.</li>
 * <li>Expanded nodes are removed from the open map, and nodes dropped for
 * exceeding {@code pMaxCost} are also removed from the sorted open queue, so
 * the map and the queue stay in sync and the loop terminates correctly.</li>
 * <li>An already-open neighbor is only re-parented when the new route is
 * strictly cheaper.</li>
 * <li>{@code Node} overrides {@code hashCode()} consistently with
 * {@code equals()}, and {@code compareTo} no longer relies on float
 * subtraction.</li>
 * </ul>
 *
 * TODO Nodes could be recycled in a pool.
 *
 * (c) OrangeGame 2012
 *
 * @author OrangeGame <OGEngine@orangegame.cn>
 */
public class AStarPathFinder<T> implements IPathFinder<T> {
	// ===========================================================
	// Methods for/from SuperClass/Interfaces
	// ===========================================================

	/** Convenience overload: no cost limit, no listener. */
	@Override
	public Path findPath(final IPathFinderMap<T> pPathFinderMap, final int pXMin, final int pYMin, final int pXMax, final int pYMax, final T pEntity, final int pFromX, final int pFromY, final int pToX, final int pToY, final boolean pAllowDiagonal, final IAStarHeuristic<T> pAStarHeuristic, final ICostFunction<T> pCostFunction) {
		return this.findPath(pPathFinderMap, pXMin, pYMin, pXMax, pYMax, pEntity, pFromX, pFromY, pToX, pToY, pAllowDiagonal, pAStarHeuristic, pCostFunction, Float.MAX_VALUE);
	}

	/** Convenience overload: cost limit but no listener. */
	@Override
	public Path findPath(final IPathFinderMap<T> pPathFinderMap, final int pXMin, final int pYMin, final int pXMax, final int pYMax, final T pEntity, final int pFromX, final int pFromY, final int pToX, final int pToY, final boolean pAllowDiagonal, final IAStarHeuristic<T> pAStarHeuristic, final ICostFunction<T> pCostFunction, final float pMaxCost) {
		return this.findPath(pPathFinderMap, pXMin, pYMin, pXMax, pYMax, pEntity, pFromX, pFromY, pToX, pToY, pAllowDiagonal, pAStarHeuristic, pCostFunction, pMaxCost, null);
	}

	/**
	 * Runs A* from (pFromX, pFromY) to (pToX, pToY) within the given bounds.
	 *
	 * @param pMaxCost routes whose accumulated cost exceeds this are abandoned.
	 * @param pPathFinderListener optional; notified for every neighbor taken
	 *        into (or updated in) the open set.
	 * @return the path including both endpoints, or {@code null} when start and
	 *         goal coincide, either endpoint is blocked, or no route within
	 *         {@code pMaxCost} exists.
	 */
	@Override
	public Path findPath(final IPathFinderMap<T> pPathFinderMap, final int pXMin, final int pYMin, final int pXMax, final int pYMax, final T pEntity, final int pFromX, final int pFromY, final int pToX, final int pToY, final boolean pAllowDiagonal, final IAStarHeuristic<T> pAStarHeuristic, final ICostFunction<T> pCostFunction, final float pMaxCost, final IPathFinderListener<T> pPathFinderListener) {
		if(((pFromX == pToX) && (pFromY == pToY)) || pPathFinderMap.isBlocked(pFromX, pFromY, pEntity) || pPathFinderMap.isBlocked(pToX, pToY, pEntity)) {
			return null;
		}

		final Node fromNode = new Node(pFromX, pFromY, pAStarHeuristic.getExpectedRestCost(pPathFinderMap, pEntity, pFromX, pFromY, pToX, pToY));
		final long fromNodeID = fromNode.mID;
		final long toNodeID = Node.calculateID(pToX, pToY);

		final LongSparseArray<Node> visitedNodes = new LongSparseArray<Node>();
		final LongSparseArray<Node> openNodes = new LongSparseArray<Node>();
		final IQueue<Node> sortedOpenNodes = new SortedQueue<Node>(new ShiftList<Node>());

		/* Initialize algorithm. */
		openNodes.put(fromNodeID, fromNode);
		sortedOpenNodes.enter(fromNode);

		Node currentNode = null;
		while(openNodes.size() > 0) {
			/* The head of the queue is the open node with the lowest total cost. */
			currentNode = sortedOpenNodes.poll();
			final long currentNodeID = currentNode.mID;
			if(currentNodeID == toNodeID) {
				break;
			}

			/* Keep the open map in sync with the queue, then mark as expanded. */
			openNodes.remove(currentNodeID);
			visitedNodes.put(currentNodeID, currentNode);

			/* Examine all neighbors of the current position. */
			for(int dX = -1; dX <= 1; dX++) {
				for(int dY = -1; dY <= 1; dY++) {
					if((dX == 0) && (dY == 0)) {
						continue;
					}
					if(!pAllowDiagonal && (dX != 0) && (dY != 0)) {
						continue;
					}

					final int neighborNodeX = dX + currentNode.mX;
					final int neighborNodeY = dY + currentNode.mY;

					if(!IntBoundsUtils.contains(pXMin, pYMin, pXMax, pYMax, neighborNodeX, neighborNodeY) || pPathFinderMap.isBlocked(neighborNodeX, neighborNodeY, pEntity)) {
						continue;
					}

					final long neighborNodeID = Node.calculateID(neighborNodeX, neighborNodeY);
					if(visitedNodes.indexOfKey(neighborNodeID) >= 0) {
						continue;
					}

					Node neighborNode = openNodes.get(neighborNodeID);
					final boolean neighborNodeIsNew = (neighborNode == null);
					if(neighborNodeIsNew) {
						neighborNode = new Node(neighborNodeX, neighborNodeY, pAStarHeuristic.getExpectedRestCost(pPathFinderMap, pEntity, neighborNodeX, neighborNodeY, pToX, pToY));
					}

					/* Cumulative cost of reaching the neighbor through currentNode. */
					final float costFromCurrentToNeighbor = pCostFunction.getCost(pPathFinderMap, currentNode.mX, currentNode.mY, neighborNodeX, neighborNodeY, pEntity);
					final float neighborNodeCost = currentNode.mCost + costFromCurrentToNeighbor;

					if(neighborNodeCost > pMaxCost) {
						/* Route too expensive -> drop the node entirely if it was open. */
						if(!neighborNodeIsNew) {
							openNodes.remove(neighborNodeID);
							sortedOpenNodes.remove(neighborNode);
						}
						continue;
					}

					if(!neighborNodeIsNew && neighborNodeCost >= neighborNode.mCost) {
						/* The route already known for this open node is at least as good. */
						continue;
					}

					neighborNode.setParent(currentNode, costFromCurrentToNeighbor);
					if(neighborNodeIsNew) {
						openNodes.put(neighborNodeID, neighborNode);
					} else {
						/* Remove and re-enter so the queue re-sorts the cheaper node. */
						sortedOpenNodes.remove(neighborNode);
					}
					sortedOpenNodes.enter(neighborNode);

					if(pPathFinderListener != null) {
						pPathFinderListener.onVisited(pEntity, neighborNodeX, neighborNodeY);
					}
				}
			}
		}

		/* Cleanup. */
		// TODO We could just let the GC do its work.
		visitedNodes.clear();
		openNodes.clear();
		sortedOpenNodes.clear();

		/* Check if a path was found. */
		if(currentNode == null || currentNode.mID != toNodeID) {
			return null;
		}

		/* Determine the path length by walking the parent chain back to the start. */
		int length = 1;
		Node tmp = currentNode;
		while(tmp.mID != fromNodeID) {
			tmp = tmp.mParent;
			length++;
		}

		/* Trace the path back from the target to the start. */
		final Path path = new Path(length);
		int index = length - 1;
		tmp = currentNode;
		while(tmp.mID != fromNodeID) {
			path.set(index, tmp.mX, tmp.mY);
			tmp = tmp.mParent;
			index--;
		}
		path.set(0, pFromX, pFromY);

		return path;
	}

	// ===========================================================
	// Inner and Anonymous Classes
	// ===========================================================

	/** A grid cell plus A* bookkeeping; identity is the packed (x, y) id. */
	private static final class Node implements Comparable<Node> {
		// ===========================================================
		// Fields
		// ===========================================================

		/* package */ Node mParent;
		/* package */ final int mX;
		/* package */ final int mY;
		/* package */ final long mID;
		/* package */ final float mExpectedRestCost; // heuristic estimate h(n)
		/* package */ float mCost;      // accumulated cost from the start node, g(n)
		/* package */ float mTotalCost; // f(n) = g(n) + h(n); queue sort key

		// ===========================================================
		// Constructors
		// ===========================================================

		public Node(final int pX, final int pY, final float pExpectedRestCost) {
			this.mX = pX;
			this.mY = pY;
			this.mExpectedRestCost = pExpectedRestCost;
			this.mID = Node.calculateID(pX, pY);
		}

		// ===========================================================
		// Getter & Setter
		// ===========================================================

		/**
		 * Links this node to {@code pParent} and updates g/f costs.
		 *
		 * @param pStepCost cost of the single step from pParent to this node;
		 *        the accumulated cost is derived from the parent's cost.
		 */
		public void setParent(final Node pParent, final float pStepCost) {
			this.mParent = pParent;
			this.mCost = (pParent == null ? 0 : pParent.mCost) + pStepCost;
			this.mTotalCost = this.mCost + this.mExpectedRestCost;
		}

		// ===========================================================
		// Methods for/from SuperClass/Interfaces
		// ===========================================================

		@Override
		public int compareTo(final Node pNode) {
			/* Float.compare avoids the NaN pitfalls of manual subtraction. */
			return Float.compare(this.mTotalCost, pNode.mTotalCost);
		}

		@Override
		public boolean equals(final Object pOther) {
			if(this == pOther) {
				return true;
			} else if(pOther == null || this.getClass() != pOther.getClass()) {
				return false;
			}
			return this.equals((Node) pOther);
		}

		@Override
		public int hashCode() {
			/* Consistent with equals(), which compares mID only. */
			return (int) (this.mID ^ (this.mID >>> 32));
		}

		@Override
		public String toString() {
			return this.getClass().getSimpleName() + " [x=" + this.mX + ", y=" + this.mY + "]";
		}

		// ===========================================================
		// Methods
		// ===========================================================

		/** Packs (x, y) into one long; masks y so negative values do not clobber x. */
		public static long calculateID(final int pX, final int pY) {
			return (((long) pX) << 32) | (pY & 0xFFFFFFFFL);
		}

		public boolean equals(final Node pNode) {
			return this.mID == pNode.mID;
		}
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.aws2.sqs;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.NoFactoryAvailableException;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.core.SdkBytes;
import software.amazon.awssdk.services.sqs.SqsClient;
import software.amazon.awssdk.services.sqs.model.DeleteMessageRequest;
import software.amazon.awssdk.services.sqs.model.DeleteMessageResponse;
import software.amazon.awssdk.services.sqs.model.ListQueuesRequest;
import software.amazon.awssdk.services.sqs.model.ListQueuesResponse;
import software.amazon.awssdk.services.sqs.model.MessageAttributeValue;
import software.amazon.awssdk.services.sqs.model.PurgeQueueRequest;
import software.amazon.awssdk.services.sqs.model.PurgeQueueResponse;
import software.amazon.awssdk.services.sqs.model.SendMessageBatchRequest;
import software.amazon.awssdk.services.sqs.model.SendMessageBatchRequestEntry;
import software.amazon.awssdk.services.sqs.model.SendMessageBatchResponse;
import software.amazon.awssdk.services.sqs.model.SendMessageRequest;
import software.amazon.awssdk.services.sqs.model.SendMessageResponse;
/**
* A Producer which sends messages to the Amazon Web Service Simple Queue Service
* <a href="http://aws.amazon.com/sqs/">AWS SQS</a>
*/
public class Sqs2Producer extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(Sqs2Producer.class);
// Cached string representation; presumably built lazily by a toString()
// defined later in this class - TODO confirm (not visible in this chunk).
private transient String sqsProducerToString;
/**
 * Creates the producer and validates FIFO configuration up-front:
 * a messageGroupIdStrategy is mandatory when the target queue is a FIFO queue.
 *
 * @throws IllegalArgumentException when the endpoint targets a FIFO queue but
 *         no messageGroupIdStrategy is configured
 */
public Sqs2Producer(Sqs2Endpoint endpoint) throws NoFactoryAvailableException {
super(endpoint);
if (endpoint.getConfiguration().isFifoQueue()
&& ObjectHelper.isEmpty(getEndpoint().getConfiguration().getMessageGroupIdStrategy())) {
throw new IllegalArgumentException("messageGroupIdStrategy must be set for FIFO queues.");
}
}
/**
 * Dispatches the exchange to the requested SQS operation. When no explicit
 * operation is configured, the exchange body is sent as a single message.
 *
 * @throws IllegalArgumentException for an unrecognized operation
 */
@Override
public void process(Exchange exchange) throws Exception {
    Sqs2Operations operation = determineOperation(exchange);
    if (!ObjectHelper.isEmpty(operation)) {
        switch (operation) {
            case sendBatchMessage:
                sendBatchMessage(getClient(), exchange);
                break;
            case deleteMessage:
                deleteMessage(getClient(), exchange);
                break;
            case listQueues:
                listQueues(getClient(), exchange);
                break;
            case purgeQueue:
                purgeQueue(getClient(), exchange);
                break;
            default:
                throw new IllegalArgumentException("Unsupported operation");
        }
    } else {
        processSingleMessage(exchange);
    }
}
/**
 * Sends the exchange body as one SQS message (with translated header
 * attributes, optional delay and FIFO attributes) and copies the returned
 * message id and body MD5 onto the result message headers.
 */
public void processSingleMessage(final Exchange exchange) {
    final String messageBody = exchange.getIn().getBody(String.class);
    final SendMessageRequest.Builder requestBuilder
            = SendMessageRequest.builder().queueUrl(getQueueUrl()).messageBody(messageBody);
    requestBuilder.messageAttributes(translateAttributes(exchange.getIn().getHeaders(), exchange));
    addDelay(requestBuilder, exchange);
    configureFifoAttributes(requestBuilder, exchange);

    LOG.trace("Sending request [{}] from exchange [{}]...", requestBuilder, exchange);
    final SendMessageResponse response = getClient().sendMessage(requestBuilder.build());
    LOG.trace("Received result [{}]", response);

    final Message outMessage = getMessageForResponse(exchange);
    outMessage.setHeader(Sqs2Constants.MESSAGE_ID, response.messageId());
    outMessage.setHeader(Sqs2Constants.MD5_OF_BODY, response.md5OfMessageBody());
}
/**
 * Sends a batch of messages. An Iterable body is treated as a sequence of
 * String message bodies (each getting a random UUID entry id, translated
 * attributes, delay and FIFO attributes); any other body is expected to be a
 * ready-made SendMessageBatchRequest.
 *
 * Fixes: uses {@code Iterable<?>} instead of the raw Iterable type, and the
 * duplicated response handling is shared between both input shapes. Non-String
 * elements still fail fast with ClassCastException, as before.
 */
private void sendBatchMessage(SqsClient amazonSQS, Exchange exchange) {
    final SendMessageBatchResponse result;
    if (exchange.getIn().getBody() instanceof Iterable) {
        SendMessageBatchRequest.Builder request = SendMessageBatchRequest.builder().queueUrl(getQueueUrl());
        Collection<SendMessageBatchRequestEntry> entries = new ArrayList<>();
        Iterable<?> bodies = exchange.getIn().getBody(Iterable.class);
        for (Object o : bodies) {
            String object = (String) o;
            SendMessageBatchRequestEntry.Builder entry = SendMessageBatchRequestEntry.builder();
            entry.id(UUID.randomUUID().toString());
            entry.messageAttributes(translateAttributes(exchange.getIn().getHeaders(), exchange));
            entry.messageBody(object);
            addDelay(entry, exchange);
            configureFifoAttributes(entry, exchange);
            entries.add(entry.build());
        }
        request.entries(entries);
        result = amazonSQS.sendMessageBatch(request.build());
    } else {
        SendMessageBatchRequest req = exchange.getIn().getBody(SendMessageBatchRequest.class);
        result = amazonSQS.sendMessageBatch(req);
    }
    Message message = getMessageForResponse(exchange);
    message.setBody(result);
}
private void deleteMessage(SqsClient amazonSQS, Exchange exchange) {
String receiptHandle = exchange.getIn().getHeader(Sqs2Constants.RECEIPT_HANDLE, String.class);
DeleteMessageRequest.Builder request = DeleteMessageRequest.builder();
request.queueUrl(getQueueUrl());
if (ObjectHelper.isEmpty(receiptHandle)) {
throw new IllegalArgumentException("Receipt Handle must be specified for the operation deleteMessage");
}
request.receiptHandle(receiptHandle);
DeleteMessageResponse result = amazonSQS.deleteMessage(request.build());
Message message = getMessageForResponse(exchange);
message.setBody(result);
}
private void listQueues(SqsClient amazonSQS, Exchange exchange) {
ListQueuesRequest.Builder request = ListQueuesRequest.builder();
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(Sqs2Constants.SQS_QUEUE_PREFIX))) {
request.queueNamePrefix(exchange.getIn().getHeader(Sqs2Constants.SQS_QUEUE_PREFIX, String.class));
}
ListQueuesResponse result = amazonSQS.listQueues(request.build());
Message message = getMessageForResponse(exchange);
message.setBody(result);
}
private void purgeQueue(SqsClient amazonSQS, Exchange exchange) {
PurgeQueueRequest.Builder request = PurgeQueueRequest.builder();
if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(Sqs2Constants.SQS_QUEUE_PREFIX))) {
request.queueUrl(getQueueUrl());
}
PurgeQueueResponse result = amazonSQS.purgeQueue(request.build());
Message message = getMessageForResponse(exchange);
message.setBody(result);
}
    /**
     * Applies FIFO-queue attributes (message group id and deduplication id) to a single-message
     * request. Only runs when the endpoint is configured as a FIFO queue; each attribute is set
     * only when its corresponding strategy is configured.
     *
     * @param request  the builder of the message being sent
     * @param exchange the exchange the strategies derive their values from
     */
    private void configureFifoAttributes(SendMessageRequest.Builder request, Exchange exchange) {
        if (getEndpoint().getConfiguration().isFifoQueue()) {
            // use strategies
            if (ObjectHelper.isNotEmpty(getEndpoint().getConfiguration().getMessageGroupIdStrategy())) {
                MessageGroupIdStrategy messageGroupIdStrategy = getEndpoint().getConfiguration().getMessageGroupIdStrategy();
                String messageGroupId = messageGroupIdStrategy.getMessageGroupId(exchange);
                request.messageGroupId(messageGroupId);
            }
            if (ObjectHelper.isNotEmpty(getEndpoint().getConfiguration().getMessageDeduplicationIdStrategy())) {
                MessageDeduplicationIdStrategy messageDeduplicationIdStrategy
                        = getEndpoint().getConfiguration().getMessageDeduplicationIdStrategy();
                String messageDeduplicationId = messageDeduplicationIdStrategy.getMessageDeduplicationId(exchange);
                request.messageDeduplicationId(messageDeduplicationId);
            }
        }
    }
private void configureFifoAttributes(SendMessageBatchRequestEntry.Builder request, Exchange exchange) {
if (getEndpoint().getConfiguration().isFifoQueue()) {
// use strategies
MessageGroupIdStrategy messageGroupIdStrategy = getEndpoint().getConfiguration().getMessageGroupIdStrategy();
String messageGroupId = messageGroupIdStrategy.getMessageGroupId(exchange);
request.messageGroupId(messageGroupId);
MessageDeduplicationIdStrategy messageDeduplicationIdStrategy
= getEndpoint().getConfiguration().getMessageDeduplicationIdStrategy();
String messageDeduplicationId = messageDeduplicationIdStrategy.getMessageDeduplicationId(exchange);
request.messageDeduplicationId(messageDeduplicationId);
}
}
private void addDelay(SendMessageRequest.Builder request, Exchange exchange) {
Integer headerValue = exchange.getIn().getHeader(Sqs2Constants.DELAY_HEADER, Integer.class);
Integer delayValue;
if (headerValue == null) {
LOG.trace("Using the config delay");
delayValue = getEndpoint().getConfiguration().getDelaySeconds();
} else {
LOG.trace("Using the header delay");
delayValue = headerValue;
}
LOG.trace("found delay: {}", delayValue);
request.delaySeconds(delayValue == null ? Integer.valueOf(0) : delayValue);
}
private void addDelay(SendMessageBatchRequestEntry.Builder request, Exchange exchange) {
Integer headerValue = exchange.getIn().getHeader(Sqs2Constants.DELAY_HEADER, Integer.class);
Integer delayValue;
if (headerValue == null) {
LOG.trace("Using the config delay");
delayValue = getEndpoint().getConfiguration().getDelaySeconds();
} else {
LOG.trace("Using the header delay");
delayValue = headerValue;
}
LOG.trace("found delay: {}", delayValue);
request.delaySeconds(delayValue == null ? Integer.valueOf(0) : delayValue);
}
    /** @return the SQS client owned by the endpoint */
    protected SqsClient getClient() {
        return getEndpoint().getClient();
    }
    /** @return the URL of the queue this producer targets, as resolved by the endpoint */
    protected String getQueueUrl() {
        return getEndpoint().getQueueUrl();
    }
    /** @return the endpoint's SQS configuration */
    protected Sqs2Configuration getConfiguration() {
        return getEndpoint().getConfiguration();
    }
    /** Narrows the inherited endpoint accessor to the SQS-specific endpoint type. */
    @Override
    public Sqs2Endpoint getEndpoint() {
        return (Sqs2Endpoint) super.getEndpoint();
    }
    /**
     * Lazily builds and caches a sanitized (credentials stripped) string form of this producer.
     * NOTE(review): the lazy init is not synchronized; at worst the string is computed more than
     * once, which is harmless here.
     */
    @Override
    public String toString() {
        if (sqsProducerToString == null) {
            sqsProducerToString = "SqsProducer[" + URISupport.sanitizeUri(getEndpoint().getEndpointUri()) + "]";
        }
        return sqsProducerToString;
    }
    /**
     * Translates Camel headers into SQS message attributes.
     * <p>
     * Headers rejected by the endpoint's {@code HeaderFilterStrategy} are kept; the rest are
     * mapped by runtime type: non-empty String and Date become {@code String} attributes,
     * ByteBuffer becomes {@code Binary}, Boolean becomes {@code Number.Boolean} ("1"/"0"),
     * and Number subtypes get a type-suffixed {@code Number.*} data type. Anything else is
     * dropped with a warning. The {@code instanceof} chain is order-sensitive: the concrete
     * Number subtypes must be tested before the generic Number fallback.
     *
     * @param headers  the Camel headers to translate
     * @param exchange the exchange, passed to the header filter strategy
     * @return the attribute map to attach to the outgoing SQS message (possibly empty)
     */
    Map<String, MessageAttributeValue> translateAttributes(Map<String, Object> headers, Exchange exchange) {
        Map<String, MessageAttributeValue> result = new HashMap<>();
        HeaderFilterStrategy headerFilterStrategy = getEndpoint().getHeaderFilterStrategy();
        for (Entry<String, Object> entry : headers.entrySet()) {
            // only put the message header which is not filtered into the
            // message attribute
            if (!headerFilterStrategy.applyFilterToCamelHeaders(entry.getKey(), entry.getValue(), exchange)) {
                Object value = entry.getValue();
                if (value instanceof String && !((String) value).isEmpty()) {
                    MessageAttributeValue.Builder mav = MessageAttributeValue.builder();
                    mav.dataType("String");
                    mav.stringValue((String) value);
                    result.put(entry.getKey(), mav.build());
                } else if (value instanceof ByteBuffer) {
                    MessageAttributeValue.Builder mav = MessageAttributeValue.builder();
                    mav.dataType("Binary");
                    mav.binaryValue(SdkBytes.fromByteBuffer((ByteBuffer) value));
                    result.put(entry.getKey(), mav.build());
                } else if (value instanceof Boolean) {
                    MessageAttributeValue.Builder mav = MessageAttributeValue.builder();
                    mav.dataType("Number.Boolean");
                    mav.stringValue(((Boolean) value) ? "1" : "0");
                    result.put(entry.getKey(), mav.build());
                } else if (value instanceof Number) {
                    MessageAttributeValue.Builder mav = MessageAttributeValue.builder();
                    final String dataType;
                    // concrete subtypes first; plain "Number" is the fallback
                    if (value instanceof Integer) {
                        dataType = "Number.int";
                    } else if (value instanceof Byte) {
                        dataType = "Number.byte";
                    } else if (value instanceof Double) {
                        dataType = "Number.double";
                    } else if (value instanceof Float) {
                        dataType = "Number.float";
                    } else if (value instanceof Long) {
                        dataType = "Number.long";
                    } else if (value instanceof Short) {
                        dataType = "Number.short";
                    } else {
                        dataType = "Number";
                    }
                    mav.dataType(dataType);
                    mav.stringValue(value.toString());
                    result.put(entry.getKey(), mav.build());
                } else if (value instanceof Date) {
                    MessageAttributeValue.Builder mav = MessageAttributeValue.builder();
                    mav.dataType("String");
                    mav.stringValue(value.toString());
                    result.put(entry.getKey(), mav.build());
                } else {
                    // cannot translate the message header to message attribute
                    // value
                    LOG.warn("Cannot put the message header key={}, value={} into Sqs MessageAttribute", entry.getKey(),
                            entry.getValue());
                }
            }
        }
        return result;
    }
    /** @return the message on which the operation's response should be stored */
    public static Message getMessageForResponse(final Exchange exchange) {
        return exchange.getMessage();
    }
private Sqs2Operations determineOperation(Exchange exchange) {
Sqs2Operations operation = exchange.getIn().getHeader(Sqs2Constants.SQS_OPERATION, Sqs2Operations.class);
if (operation == null) {
operation = getConfiguration().getOperation();
}
return operation;
}
}
| |
/**
* <copyright>
*
* Copyright (c) 2010 SAP AG.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Reiner Hille-Doering (SAP AG) - initial API and implementation and/or initial documentation
*
* </copyright>
*/
package org.eclipse.securebpmn2.impl;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;
import org.eclipse.securebpmn2.Group;
import org.eclipse.securebpmn2.Securebpmn2Package;
import org.eclipse.securebpmn2.Subject;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Group</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.eclipse.securebpmn2.impl.GroupImpl#getGroupName <em>Group Name</em>}</li>
* <li>{@link org.eclipse.securebpmn2.impl.GroupImpl#getSubjects <em>Subjects</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class GroupImpl extends SubjectImpl implements Group {
    /**
     * The default value of the '{@link #getGroupName() <em>Group Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * Groups are unnamed by default.
     * <!-- end-user-doc -->
     * @see #getGroupName()
     * @generated
     * @ordered
     */
    protected static final String GROUP_NAME_EDEFAULT = null;
    /**
     * The cached value of the '{@link #getGroupName() <em>Group Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getGroupName()
     * @generated
     * @ordered
     */
    protected String groupName = GROUP_NAME_EDEFAULT;
    /**
     * The cached value of the '{@link #getSubjects() <em>Subjects</em>}' reference list.
     * <!-- begin-user-doc -->
     * Lazily created by {@link #getSubjects()}; maintains the inverse of Subject's groups.
     * <!-- end-user-doc -->
     * @see #getSubjects()
     * @generated
     * @ordered
     */
    protected EList<Subject> subjects;
    /**
     * <!-- begin-user-doc -->
     * Protected: instances are created through the Securebpmn2 factory.
     * <!-- end-user-doc -->
     * @generated
     */
    protected GroupImpl() {
        super();
    }
    /**
     * <!-- begin-user-doc -->
     * Returns the static metaclass for Group.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return Securebpmn2Package.Literals.GROUP;
    }
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getGroupName() {
        return groupName;
    }
    /**
     * <!-- begin-user-doc -->
     * Sets the group name and fires a SET notification when adapters require it.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setGroupName(String newGroupName) {
        String oldGroupName = groupName;
        groupName = newGroupName;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET,
                    Securebpmn2Package.GROUP__GROUP_NAME, oldGroupName,
                    groupName));
    }
    /**
     * <!-- begin-user-doc -->
     * Lazily creates the subjects list; it is bidirectional with Subject.groups, so additions
     * and removals here are mirrored on the Subject side.
     * <!-- end-user-doc -->
     * @generated
     */
    public List<Subject> getSubjects() {
        if (subjects == null) {
            subjects = new EObjectWithInverseResolvingEList.ManyInverse<Subject>(
                    Subject.class, this, Securebpmn2Package.GROUP__SUBJECTS,
                    Securebpmn2Package.SUBJECT__GROUPS);
        }
        return subjects;
    }
    /**
     * <!-- begin-user-doc -->
     * Routes inverse additions for the bidirectional subjects reference.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public NotificationChain eInverseAdd(InternalEObject otherEnd,
            int featureID, NotificationChain msgs) {
        switch (featureID) {
        case Securebpmn2Package.GROUP__SUBJECTS:
            return ((InternalEList<InternalEObject>) (InternalEList<?>) getSubjects())
                    .basicAdd(otherEnd, msgs);
        }
        return super.eInverseAdd(otherEnd, featureID, msgs);
    }
    /**
     * <!-- begin-user-doc -->
     * Routes inverse removals for the bidirectional subjects reference.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd,
            int featureID, NotificationChain msgs) {
        switch (featureID) {
        case Securebpmn2Package.GROUP__SUBJECTS:
            return ((InternalEList<?>) getSubjects()).basicRemove(otherEnd,
                    msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective getter for the Group features.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
        case Securebpmn2Package.GROUP__GROUP_NAME:
            return getGroupName();
        case Securebpmn2Package.GROUP__SUBJECTS:
            return getSubjects();
        }
        return super.eGet(featureID, resolve, coreType);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflective setter for the Group features.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
        case Securebpmn2Package.GROUP__GROUP_NAME:
            setGroupName((String) newValue);
            return;
        case Securebpmn2Package.GROUP__SUBJECTS:
            getSubjects().clear();
            getSubjects().addAll((Collection<? extends Subject>) newValue);
            return;
        }
        super.eSet(featureID, newValue);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflectively restores a feature to its default value.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
        case Securebpmn2Package.GROUP__GROUP_NAME:
            setGroupName(GROUP_NAME_EDEFAULT);
            return;
        case Securebpmn2Package.GROUP__SUBJECTS:
            getSubjects().clear();
            return;
        }
        super.eUnset(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * Reflectively reports whether a feature differs from its default value.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
        case Securebpmn2Package.GROUP__GROUP_NAME:
            return GROUP_NAME_EDEFAULT == null ? groupName != null
                    : !GROUP_NAME_EDEFAULT.equals(groupName);
        case Securebpmn2Package.GROUP__SUBJECTS:
            return subjects != null && !subjects.isEmpty();
        }
        return super.eIsSet(featureID);
    }
    /**
     * <!-- begin-user-doc -->
     * Appends the group name to the inherited string form; proxies defer to the default.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String toString() {
        if (eIsProxy())
            return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (groupName: ");
        result.append(groupName);
        result.append(')');
        return result.toString();
    }
} //GroupImpl
| |
/*L
* Copyright SAIC
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/i-spy/LICENSE.txt for details.
*/
package gov.nih.nci.ispy.web.struts.form;
import gov.nih.nci.caintegrator.application.lists.ListType;
import gov.nih.nci.caintegrator.application.lists.UserListBeanHelper;
import gov.nih.nci.caintegrator.enumeration.MultiGroupComparisonAdjustmentType;
import gov.nih.nci.caintegrator.enumeration.StatisticalMethodType;
import gov.nih.nci.ispy.service.common.TimepointType;
import gov.nih.nci.ispy.util.ispyConstants;
import gov.nih.nci.ispy.web.helper.ClinicalGroupRetriever;
import gov.nih.nci.ispy.web.helper.UIFormValidator;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.apache.log4j.Logger;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.struts.util.LabelValueBean;
/**
 * Struts form backing the GenePattern integration screen: holds the selected timepoint,
 * clinical group selections, array platform and gene-set name, and validates that at
 * least two comparison groups were chosen.
 * <p>
 * Bug fix: the logger was initialized with {@code BaseForm.class}, so every record from this
 * form was attributed to BaseForm; it now uses {@code GpIntegrationForm.class}. Public
 * signatures (including the raw {@code List}/{@code Collection} types) are unchanged for
 * compatibility with the existing JSPs and actions.
 */
public class GpIntegrationForm extends ActionForm {
    // -------------INSTANCE VARIABLES-----------------------------//
    private static Logger logger = Logger.getLogger(GpIntegrationForm.class);
    private String timepoint;
    private String[] existingGroups;
    private List existingGroupsList;
    private Collection timepointCollection = new ArrayList();
    private String[] selectedGroups;
    private String arrayPlatform = "";
    private String analysisResultName = "";
    private String geneSetName = "";
    private List geneSetNameList;

    public GpIntegrationForm() {
        // Populate the timepoint dropdown once, from the TimepointType enum
        for (TimepointType timepointType : TimepointType.values()) {
            timepointCollection.add(new LabelValueBean(timepointType.toString(), timepointType.name()));
        }
    }

    /**
     * @return Returns the existingGroups.
     */
    public String[] getExistingGroups() {
        return existingGroups;
    }

    /**
     * @param existingGroups The existingGroups to set.
     */
    public void setExistingGroups(String[] existingGroups) {
        this.existingGroups = existingGroups;
    }

    /**
     * @return Returns the existingGroupsList.
     */
    public List getExistingGroupsList() {
        return this.existingGroupsList;
    }

    /**
     * @param existingGroupsList The existingGroupsList to set.
     */
    public void setExistingGroupsList(List existingGroupsList) {
        this.existingGroupsList = existingGroupsList;
    }

    /**
     * @return Returns the selectedGroups.
     */
    public String[] getSelectedGroups() {
        return selectedGroups;
    }

    /**
     * @param selectedGroups The selectedGroups to set.
     */
    public void setSelectedGroups(String[] selectedGroups) {
        this.selectedGroups = selectedGroups;
    }

    /**
     * @return Returns the arrayPlatform.
     */
    public String getArrayPlatform() {
        return arrayPlatform;
    }

    /**
     * @param arrayPlatform The arrayPlatform to set.
     */
    public void setArrayPlatform(String arrayPlatform) {
        this.arrayPlatform = arrayPlatform;
    }

    /**
     * @return Returns the timepointCollection.
     */
    public Collection getTimepointCollection() {
        return timepointCollection;
    }

    /**
     * @param timepointCollection The timepointCollection to set.
     */
    public void setTimepointCollection(Collection timepointCollection) {
        this.timepointCollection = timepointCollection;
    }

    public String getAnalysisResultName() {
        return analysisResultName;
    }

    public void setAnalysisResultName(String analysisResultName) {
        this.analysisResultName = analysisResultName;
    }

    /**
     * Validates the form: at least two comparison groups must be selected. On failure the
     * group and gene-set dropdown backing lists are re-populated from the session so the
     * page can be redisplayed.
     *
     * @param mapping the action mapping
     * @param request the current request (its session supplies group/list data)
     * @return the accumulated validation errors (empty when the form is valid)
     */
    public ActionErrors validate(ActionMapping mapping,
            HttpServletRequest request) {
        ActionErrors errors = new ActionErrors();
        if (this.selectedGroups == null || this.selectedGroups.length < 2)
            errors.add("selectedGroups1", new ActionMessage(
                    "gov.nih.nci.nautilus.ui.struts.form.groups.two.or.more.error"));
        //User must select exactly 2 comparison Groups
        //errors = UIFormValidator.validateSelectedGroups(selectedGroups, timepointRange, timepointBaseAcross, timepointComparison, request.getSession(), errors);
        if (errors.size() > 0) {
            ClinicalGroupRetriever clinicalGroupRetriever = new ClinicalGroupRetriever(request.getSession());
            this.setExistingGroupsList(clinicalGroupRetriever.getClinicalGroupsCollection());
            UserListBeanHelper listHelper = new UserListBeanHelper(request.getSession());
            //fetch the users gene groups populate the dropdown
            List<String> names = (List<String>) listHelper.getGenericListNames(ListType.Gene);
            List<LabelValueBean> gsNameList = new ArrayList<LabelValueBean>();
            for (String listName : names) {
                gsNameList.add(new LabelValueBean(listName, listName));
            }
            this.setGeneSetNameList(gsNameList);
        }
        return errors;
    }

    /**
     * Resets the form between requests; only the array platform is cleared.
     *
     * @param mapping the action mapping
     * @param request the current request
     */
    public void reset(ActionMapping mapping, HttpServletRequest request) {
        arrayPlatform = "";
    }

    public String getGeneSetName() {
        return geneSetName;
    }

    public void setGeneSetName(String geneSetName) {
        this.geneSetName = geneSetName;
    }

    public List getGeneSetNameList() {
        return geneSetNameList;
    }

    public void setGeneSetNameList(List geneSetNameList) {
        this.geneSetNameList = geneSetNameList;
    }

    public String getTimepoint() {
        return timepoint;
    }

    public void setTimepoint(String timepoint) {
        this.timepoint = timepoint;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.utils.obs;
import java.util.Arrays;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.cassandra.db.TypeSizes;
import org.apache.cassandra.utils.concurrent.Ref;
/**
* <p>
* An "open" BitSet implementation that allows direct access to the arrays of words
* storing the bits. Derived from Lucene's OpenBitSet, but with a paged backing array
* (see bits delaration, below).
* </p>
* <p>
* Unlike java.util.bitset, the fact that bits are packed into an array of longs
* is part of the interface. This allows efficient implementation of other algorithms
* by someone other than the author. It also allows one to efficiently implement
* alternate serialization or interchange formats.
* </p>
* <p>
* <code>OpenBitSet</code> is faster than <code>java.util.BitSet</code> in most operations
* and *much* faster at calculating cardinality of sets and results of set operations.
* It can also handle sets of larger cardinality (up to 64 * 2**32-1)
* </p>
* <p>
* The goals of <code>OpenBitSet</code> are the fastest implementation possible, and
* maximum code reuse. Extra safety and encapsulation
* may always be built on top, but if that's built in, the cost can never be removed (and
* hence people re-implement their own version in order to get better performance).
* If you want a "safe", totally encapsulated (and slower and limited) BitSet
* class, use <code>java.util.BitSet</code>.
* </p>
*/
public class OpenBitSet implements IBitSet
{
    /**
     * We break the bitset up into multiple arrays to avoid promotion failure caused by attempting to allocate
     * large, contiguous arrays (CASSANDRA-2466). All sub-arrays but the last are uniformly PAGE_SIZE words;
     * to avoid waste in small bloom filters (of which Cassandra has many: one per row) the last sub-array
     * is sized to exactly the remaining number of words required to achieve the desired set size (CASSANDRA-3618).
     */
    private final long[][] bits;
    private int wlen; // number of words (elements) used in the array
    private final int pageCount;
    private static final int PAGE_SIZE = 4096;
    /**
     * Constructs an OpenBitSet large enough to hold numBits.
     * @param numBits
     */
    public OpenBitSet(long numBits)
    {
        wlen = (int) bits2words(numBits);
        int lastPageSize = wlen % PAGE_SIZE;
        int fullPageCount = wlen / PAGE_SIZE;
        pageCount = fullPageCount + (lastPageSize == 0 ? 0 : 1);
        bits = new long[pageCount][];
        for (int i = 0; i < fullPageCount; ++i)
            bits[i] = new long[PAGE_SIZE];
        if (lastPageSize != 0)
            bits[bits.length - 1] = new long[lastPageSize];
    }
    public OpenBitSet()
    {
        this(64);
    }
    /**
     * @return the pageSize
     */
    public int getPageSize()
    {
        return PAGE_SIZE;
    }
    public int getPageCount()
    {
        return pageCount;
    }
    public long[] getPage(int pageIdx)
    {
        return bits[pageIdx];
    }
    /** Returns the current capacity in bits (1 greater than the index of the last bit) */
    public long capacity() { return ((long)wlen) << 6; }
    @Override
    public long offHeapSize()
    {
        return 0;
    }
    public void addTo(Ref.IdentityCollection identities)
    {
    }
    /**
     * Returns the current capacity of this set. Included for
     * compatibility. This is *not* equal to {@link #cardinality}
     */
    public long size()
    {
        return capacity();
    }
    // @Override -- not until Java 1.6
    public long length()
    {
        return capacity();
    }
    /** Returns true if there are no set bits */
    public boolean isEmpty() { return cardinality()==0; }
    /** Expert: gets the number of longs in the array that are in use */
    public int getNumWords() { return wlen; }
    /**
     * Returns true or false for the specified bit index.
     * The index should be less than the OpenBitSet size
     */
    public boolean get(int index)
    {
        int i = index >> 6; // div 64
        // signed shift will keep a negative index and force an
        // array-index-out-of-bounds-exception, removing the need for an explicit check.
        int bit = index & 0x3f; // mod 64
        long bitmask = 1L << bit;
        // TODO perfectionist one can implement this using bit operations
        return (bits[i / PAGE_SIZE][i % PAGE_SIZE ] & bitmask) != 0;
    }
    /**
     * Returns true or false for the specified bit index.
     * The index should be less than the OpenBitSet size.
     */
    public boolean get(long index)
    {
        int i = (int)(index >> 6); // div 64
        int bit = (int)index & 0x3f; // mod 64
        long bitmask = 1L << bit;
        // TODO perfectionist one can implement this using bit operations
        return (bits[i / PAGE_SIZE][i % PAGE_SIZE ] & bitmask) != 0;
    }
    /**
     * Sets the bit at the specified index.
     * The index should be less than the OpenBitSet size.
     */
    public void set(long index)
    {
        int wordNum = (int)(index >> 6);
        int bit = (int)index & 0x3f;
        long bitmask = 1L << bit;
        bits[ wordNum / PAGE_SIZE ][ wordNum % PAGE_SIZE ] |= bitmask;
    }
    /**
     * Sets the bit at the specified index.
     * The index should be less than the OpenBitSet size.
     */
    public void set(int index)
    {
        int wordNum = index >> 6; // div 64
        int bit = index & 0x3f; // mod 64
        long bitmask = 1L << bit;
        bits[ wordNum / PAGE_SIZE ][ wordNum % PAGE_SIZE ] |= bitmask;
    }
    /**
     * clears a bit.
     * The index should be less than the OpenBitSet size.
     */
    public void clear(int index)
    {
        int wordNum = index >> 6;
        int bit = index & 0x03f;
        long bitmask = 1L << bit;
        bits[wordNum / PAGE_SIZE][wordNum % PAGE_SIZE] &= ~bitmask;
        // hmmm, it takes one more instruction to clear than it does to set... any
        // way to work around this?  If there were only 63 bits per word, we could
        // use a right shift of 10111111...111 in binary to position the 0 in the
        // correct place (using sign extension).
        // Could also use Long.rotateRight() or rotateLeft() *if* they were converted
        // by the JVM into a native instruction.
        // bits[word] &= Long.rotateLeft(0xfffffffe,bit);
    }
    /**
     * clears a bit.
     * The index should be less than the OpenBitSet size.
     */
    public void clear(long index)
    {
        int wordNum = (int)(index >> 6); // div 64
        int bit = (int)index & 0x3f; // mod 64
        long bitmask = 1L << bit;
        bits[wordNum / PAGE_SIZE][wordNum % PAGE_SIZE] &= ~bitmask;
    }
    /**
     * Clears a range of bits. Clearing past the end does not change the size of the set.
     *
     * @param startIndex lower index
     * @param endIndex one-past the last bit to clear
     */
    public void clear(int startIndex, int endIndex)
    {
        if (endIndex <= startIndex) return;
        int startWord = (startIndex>>6);
        if (startWord >= wlen) return;
        // since endIndex is one past the end, this is index of the last
        // word to be changed.
        int endWord = ((endIndex-1)>>6);
        long startmask = -1L << startIndex;
        long endmask = -1L >>> -endIndex; // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
        // invert masks since we are clearing
        startmask = ~startmask;
        endmask = ~endmask;
        if (startWord == endWord)
        {
            bits[startWord / PAGE_SIZE][startWord % PAGE_SIZE] &= (startmask | endmask);
            return;
        }
        bits[startWord / PAGE_SIZE][startWord % PAGE_SIZE] &= startmask;
        int middle = Math.min(wlen, endWord);
        if (startWord / PAGE_SIZE == middle / PAGE_SIZE)
        {
            Arrays.fill(bits[startWord/PAGE_SIZE], (startWord+1) % PAGE_SIZE, middle % PAGE_SIZE, 0L);
        } else
        {
            while (++startWord<middle)
                bits[startWord / PAGE_SIZE][startWord % PAGE_SIZE] = 0L;
        }
        if (endWord < wlen)
        {
            bits[endWord / PAGE_SIZE][endWord % PAGE_SIZE] &= endmask;
        }
    }
    /** Clears a range of bits. Clearing past the end does not change the size of the set.
     *
     * @param startIndex lower index
     * @param endIndex one-past the last bit to clear
     */
    public void clear(long startIndex, long endIndex)
    {
        if (endIndex <= startIndex) return;
        int startWord = (int)(startIndex>>6);
        if (startWord >= wlen) return;
        // since endIndex is one past the end, this is index of the last
        // word to be changed.
        int endWord = (int)((endIndex-1)>>6);
        long startmask = -1L << startIndex;
        long endmask = -1L >>> -endIndex; // 64-(endIndex&0x3f) is the same as -endIndex due to wrap
        // invert masks since we are clearing
        startmask = ~startmask;
        endmask = ~endmask;
        if (startWord == endWord)
        {
            bits[startWord / PAGE_SIZE][startWord % PAGE_SIZE] &= (startmask | endmask);
            return;
        }
        bits[startWord / PAGE_SIZE][startWord % PAGE_SIZE] &= startmask;
        int middle = Math.min(wlen, endWord);
        if (startWord / PAGE_SIZE == middle / PAGE_SIZE)
        {
            Arrays.fill(bits[startWord/PAGE_SIZE], (startWord+1) % PAGE_SIZE, middle % PAGE_SIZE, 0L);
        } else
        {
            while (++startWord<middle)
                bits[startWord / PAGE_SIZE][startWord % PAGE_SIZE] = 0L;
        }
        if (endWord < wlen)
        {
            bits[endWord / PAGE_SIZE][endWord % PAGE_SIZE] &= endmask;
        }
    }
    /** @return the number of set bits */
    public long cardinality()
    {
        long bitCount = 0L;
        // Bug fix: each page must be popcounted over its own length. Passing the total word
        // count (wlen) for every page read past the end of each PAGE_SIZE-word page whenever
        // the set spans more than one page (wlen > PAGE_SIZE), throwing
        // ArrayIndexOutOfBoundsException. The last page is exactly sized, so summing
        // bits[i].length over all pages covers precisely the wlen words in use.
        for (int i=getPageCount();i-->0;)
            bitCount+=BitUtil.pop_array(bits[i],0,bits[i].length);
        return bitCount;
    }
    /** this = this AND other */
    public void intersect(OpenBitSet other)
    {
        int newLen= Math.min(this.wlen,other.wlen);
        long[][] thisArr = this.bits;
        long[][] otherArr = other.bits;
        int thisPageSize = PAGE_SIZE;
        int otherPageSize = OpenBitSet.PAGE_SIZE;
        // testing against zero can be more efficient
        int pos=newLen;
        while(--pos>=0)
        {
            thisArr[pos / thisPageSize][ pos % thisPageSize] &= otherArr[pos / otherPageSize][pos % otherPageSize];
        }
        if (this.wlen > newLen)
        {
            // fill zeros from the new shorter length to the old length
            for (pos=wlen;pos-->newLen;)
                thisArr[pos / thisPageSize][ pos % thisPageSize] =0;
        }
        this.wlen = newLen;
    }
    // some BitSet compatibility methods
    /** see {@link #intersect(OpenBitSet)} */
    public void and(OpenBitSet other)
    {
        intersect(other);
    }
    /** Lowers numWords, the number of words in use,
     * by checking for trailing zero words.
     */
    public void trimTrailingZeros()
    {
        int idx = wlen-1;
        while (idx>=0 && bits[idx / PAGE_SIZE][idx % PAGE_SIZE]==0) idx--;
        wlen = idx+1;
    }
    /** returns the number of 64 bit words it would take to hold numBits */
    public static long bits2words(long numBits)
    {
        return (((numBits-1)>>>6)+1);
    }
    /** returns true if both sets have the same bits set */
    @Override
    public boolean equals(Object o)
    {
        if (this == o) return true;
        if (!(o instanceof OpenBitSet)) return false;
        OpenBitSet a;
        OpenBitSet b = (OpenBitSet)o;
        // make a the larger set.
        if (b.wlen > this.wlen)
        {
            a = b; b=this;
        }
        else
        {
            a=this;
        }
        int aPageSize = OpenBitSet.PAGE_SIZE;
        int bPageSize = OpenBitSet.PAGE_SIZE;
        // check for any set bits out of the range of b
        for (int i=a.wlen-1; i>=b.wlen; i--)
        {
            if (a.bits[i/aPageSize][i % aPageSize]!=0) return false;
        }
        for (int i=b.wlen-1; i>=0; i--)
        {
            if (a.bits[i/aPageSize][i % aPageSize] != b.bits[i/bPageSize][i % bPageSize]) return false;
        }
        return true;
    }
    @Override
    public int hashCode()
    {
        // Start with a zero hash and use a mix that results in zero if the input is zero.
        // This effectively truncates trailing zeros without an explicit check.
        long h = 0;
        for (int i = wlen; --i>=0;)
        {
            h ^= bits[i / PAGE_SIZE][i % PAGE_SIZE];
            h = (h << 1) | (h >>> 63); // rotate left
        }
        // fold leftmost bits into right and add a constant to prevent
        // empty sets from returning 0, which is too common.
        return (int)((h>>32) ^ h) + 0x98761234;
    }
    public void close()
    {
        // noop, let GC do the cleanup.
    }
    public void serialize(DataOutput out) throws IOException
    {
        int bitLength = getNumWords();
        int pageSize = getPageSize();
        int pageCount = getPageCount();
        out.writeInt(bitLength);
        for (int p = 0; p < pageCount; p++)
        {
            long[] bits = getPage(p);
            for (int i = 0; i < pageSize && bitLength-- > 0; i++)
            {
                out.writeLong(bits[i]);
            }
        }
    }
    public long serializedSize()
    {
        int bitLength = getNumWords();
        int pageSize = getPageSize();
        int pageCount = getPageCount();
        long size = TypeSizes.sizeof(bitLength); // length
        for (int p = 0; p < pageCount; p++)
        {
            long[] bits = getPage(p);
            for (int i = 0; i < pageSize && bitLength-- > 0; i++)
                size += TypeSizes.sizeof(bits[i]); // bucket
        }
        return size;
    }
    public void clear()
    {
        clear(0, capacity());
    }
    public static OpenBitSet deserialize(DataInput in) throws IOException
    {
        long bitLength = in.readInt();
        OpenBitSet bs = new OpenBitSet(bitLength << 6);
        int pageSize = bs.getPageSize();
        int pageCount = bs.getPageCount();
        for (int p = 0; p < pageCount; p++)
        {
            long[] bits = bs.getPage(p);
            for (int i = 0; i < pageSize && bitLength-- > 0; i++)
                bits[i] = in.readLong();
        }
        return bs;
    }
}
| |
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Tests for resource deletion behaviour in the R4 JPA DAO layer: soft-delete
 * markers on the resource/history tables, the delete-disabled setting, and
 * deletes inside FHIR transaction bundles.
 */
public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {
    private static final Logger ourLog = LoggerFactory.getLogger(FhirResourceDaoR4DeleteTest.class);

    /**
     * Restore the delete-enabled setting to its default after each test so a
     * test that disables deletion does not leak state into later tests.
     */
    @AfterEach
    public void after() {
        myDaoConfig.setDeleteEnabled(new DaoConfig().isDeleteEnabled());
    }

    @Test
    public void testDeleteMarksResourceAndVersionAsDeleted() {
        Patient p = new Patient();
        p.setActive(true);
        IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
        myPatientDao.delete(id);

        // The resource table row should carry a deletion timestamp
        runInTransaction(() -> {
            ResourceTable resourceTable = myResourceTableDao.findById(id.getIdPartAsLong()).get();
            assertNotNull(resourceTable.getDeleted());
        });

        // Version 1 (the create) must NOT be marked deleted
        runInTransaction(() -> {
            ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 1);
            assertNull(resourceTable.getDeleted());
            assertNotNull(resourceTable.getPersistentId());
        });

        // Version 2 (the delete) must be marked deleted
        runInTransaction(() -> {
            ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2);
            assertNotNull(resourceTable.getDeleted());
        });

        // Reading the current version fails with GONE...
        assertThrows(ResourceGoneException.class, () -> myPatientDao.read(id.toUnqualifiedVersionless()));
        // ...reading the pre-delete version still works...
        myPatientDao.read(id.toUnqualifiedVersionless().withVersion("1"));
        // ...and the delete version itself reads as GONE
        assertThrows(ResourceGoneException.class, () -> myPatientDao.read(id.toUnqualifiedVersionless().withVersion("2")));
    }

    @Test
    public void testDeleteDisabled() {
        myDaoConfig.setDeleteEnabled(false);

        Patient p = new Patient();
        p.setActive(true);
        IIdType pId = myPatientDao.create(p).getId().toUnqualifiedVersionless();

        // Deletion must be rejected outright while the setting is off
        PreconditionFailedException e = assertThrows(PreconditionFailedException.class, () -> myPatientDao.delete(pId));
        assertEquals("Resource deletion is not permitted on this server", e.getMessage());
    }

    @Test
    public void testDeleteCircularReferenceInTransaction() {
        // Create two resources with a circular reference
        Organization org1 = new Organization();
        org1.setId(IdType.newRandomUuid());
        Organization org2 = new Organization();
        org2.setId(IdType.newRandomUuid());
        org1.getPartOf().setReference(org2.getId());
        org2.getPartOf().setReference(org1.getId());

        // Upload them in a transaction
        Bundle createTransaction = new Bundle();
        createTransaction.setType(Bundle.BundleType.TRANSACTION);
        createTransaction
            .addEntry()
            .setResource(org1)
            .setFullUrl(org1.getId())
            .getRequest()
            .setMethod(Bundle.HTTPVerb.POST)
            .setUrl("Organization");
        createTransaction
            .addEntry()
            .setResource(org2)
            .setFullUrl(org2.getId())
            .getRequest()
            .setMethod(Bundle.HTTPVerb.POST)
            .setUrl("Organization");
        Bundle createResponse = mySystemDao.transaction(mySrd, createTransaction);
        ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(createResponse));
        IdType orgId1 = new IdType(createResponse.getEntry().get(0).getResponse().getLocation()).toUnqualifiedVersionless();
        IdType orgId2 = new IdType(createResponse.getEntry().get(1).getResponse().getLocation()).toUnqualifiedVersionless();

        // Individual deletes must fail: each org is still referenced by the other
        assertThrows(ResourceVersionConflictException.class, () -> myOrganizationDao.delete(orgId1));
        assertThrows(ResourceVersionConflictException.class, () -> myOrganizationDao.delete(orgId2));

        // Deleting both within one transaction resolves the cycle and succeeds
        Bundle deleteTransaction = new Bundle();
        deleteTransaction.setType(Bundle.BundleType.TRANSACTION);
        deleteTransaction.addEntry()
            .getRequest()
            .setMethod(Bundle.HTTPVerb.DELETE)
            .setUrl(orgId1.getValue());
        deleteTransaction.addEntry()
            .getRequest()
            .setMethod(Bundle.HTTPVerb.DELETE)
            .setUrl(orgId2.getValue());
        ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(deleteTransaction));
        mySystemDao.transaction(mySrd, deleteTransaction);

        // Make sure they were deleted
        assertThrows(ResourceGoneException.class, () -> myOrganizationDao.read(orgId1));
        assertThrows(ResourceGoneException.class, () -> myOrganizationDao.read(orgId2));
    }

    @Test
    public void testResourceIsConsideredDeletedIfOnlyResourceTableEntryIsDeleted() {
        Patient p = new Patient();
        p.setActive(true);
        IIdType id = myPatientDao.create(p).getId().toUnqualifiedVersionless();
        myPatientDao.delete(id);

        // Table should be marked as deleted
        runInTransaction(() -> {
            ResourceTable resourceTable = myResourceTableDao.findById(id.getIdPartAsLong()).get();
            assertNotNull(resourceTable.getDeleted());
        });

        // Mark the current history version as not-deleted even though the actual
        // resource table entry is marked deleted — the resource table must win
        runInTransaction(() -> {
            ResourceHistoryTable resourceTable = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(id.getIdPartAsLong(), 2);
            resourceTable.setDeleted(null);
            myResourceHistoryTableDao.save(resourceTable);
        });

        assertThrows(ResourceGoneException.class, () -> myPatientDao.read(id.toUnqualifiedVersionless()));
        myPatientDao.read(id.toUnqualifiedVersionless().withVersion("1"));
        assertThrows(ResourceGoneException.class, () -> myPatientDao.read(id.toUnqualifiedVersionless().withVersion("2")));
    }

    @Test
    public void testDeleteIgnoreReferentialIntegrityForPaths() {
        // TODO(review): empty test body — either implement or remove this placeholder.
    }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.plugins.newui;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.io.URLUtil;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * Parses a plugin-manager search query into a free-text part and structured
 * attributes (e.g. {@code /tag:...}). Words may be double-quoted to include
 * spaces, and an unquoted word ending in {@code ':'} is an attribute name
 * whose value is the following word. Concrete subclasses decide which
 * attribute names they understand via {@code handleAttribute}.
 *
 * @author Alexander Lobas
 */
public abstract class SearchQueryParser {
  // Accumulated free-text portion of the query; null until a word is added.
  public String searchQuery;

  /** Appends {@code query} to {@link #searchQuery}, separating words with a single space. */
  protected void addToSearchQuery(@NotNull String query) {
    if (searchQuery == null) {
      searchQuery = query;
    }
    else {
      searchQuery += " " + query;
    }
  }

  /**
   * Splits {@code query} into words. A double-quoted run becomes one word with
   * the quotes stripped (an unterminated quote discards the remainder); an
   * unquoted word ends at a space, at the end of input, or just after a ':',
   * so {@code tag:x} yields {@code "tag:"} and {@code "x"}.
   */
  @NotNull
  protected static List<String> splitQuery(@NotNull String query) {
    List<String> words = new ArrayList<>();
    int length = query.length();
    int index = 0;
    while (index < length) {
      char startCh = query.charAt(index++);
      if (startCh == ' ') {
        // Skip word separators.
        continue;
      }
      if (startCh == '"') {
        // Quoted word: take everything up to the closing quote.
        int end = query.indexOf('"', index);
        if (end == -1) {
          // Unterminated quote: ignore the rest of the query.
          break;
        }
        words.add(query.substring(index, end));
        index = end + 1;
        continue;
      }
      // Unquoted word starting at the character just consumed.
      int start = index - 1;
      while (index <= length) {
        if (index == length) {
          // Single-character word at the very end of the query.
          words.add(query.substring(start));
          break;
        }
        char nextCh = query.charAt(index++);
        if (nextCh == ':' || nextCh == ' ' || index == length) {
          // Keep a trailing ':' (attribute marker), drop a trailing space.
          words.add(query.substring(start, nextCh == ' ' ? index - 1 : index));
          break;
        }
      }
    }
    return words;
  }

  /** Builds a {@code /tag:} search term, quoting the tag if it contains a space. */
  @NotNull
  public static String getTagQuery(@NotNull String tag) {
    return "/tag:" + (tag.indexOf(' ') == -1 ? tag : StringUtil.wrapWithDoubleQuote(tag));
  }

  /** Quotes an attribute value when it contains characters that would break parsing. */
  @NotNull
  public static String wrapAttribute(@NotNull String value) {
    return StringUtil.containsAnyChar(value, " ,:") ? StringUtil.wrapWithDoubleQuote(value) : value;
  }

  /** Parser for Marketplace searches: tag, sortBy, repository and organization attributes. */
  public static class Marketplace extends SearchQueryParser {
    public final Set<String> vendors = new HashSet<>();
    public final Set<String> tags = new HashSet<>();
    public final Set<String> repositories = new HashSet<>();
    public String sortBy;

    public Marketplace(@NotNull String query) {
      parse(query);
    }

    private void parse(@NotNull String query) {
      List<String> words = splitQuery(query);
      int size = words.size();
      if (size == 0) {
        return;
      }
      // A lone word can never be an attribute/value pair — treat as free text.
      if (size == 1) {
        addToSearchQuery(words.get(0));
        return;
      }
      int index = 0;
      while (index < size) {
        String name = words.get(index++);
        if (name.endsWith(":")) {
          if (index < size) {
            handleAttribute(name, words.get(index++));
          }
          else {
            // Attribute name without a value: fall back to the raw query.
            addToSearchQuery(query);
            return;
          }
        }
        else {
          addToSearchQuery(name);
        }
      }
    }

    protected void handleAttribute(@NotNull String name, @NotNull String value) {
      if (name.equals(SearchWords.TAG.getValue())) {
        tags.add(value);
      }
      else if (name.equals(SearchWords.SORT_BY.getValue())) {
        sortBy = value;
      }
      else if (name.equals(SearchWords.REPOSITORY.getValue())) {
        repositories.add(value);
      }
      else if (name.equals(SearchWords.ORGANIZATION.getValue())) {
        vendors.add(value);
      }
    }

    /** Renders the parsed query as URL parameters for the Marketplace request. */
    @NotNull
    public String getUrlQuery() {
      StringBuilder url = new StringBuilder();
      if ("featured".equals(sortBy)) {
        url.append("is_featured_search=true");
      }
      else if ("updated".equals(sortBy)) {
        url.append("orderBy=update+date");
      }
      else if ("downloads".equals(sortBy)) {
        url.append("orderBy=downloads");
      }
      else if ("rating".equals(sortBy)) {
        url.append("orderBy=rating");
      }
      else if ("name".equals(sortBy)) {
        url.append("orderBy=name");
      }
      for (String tag : tags) {
        if (url.length() > 0) {
          url.append("&");
        }
        url.append("tags=").append(URLUtil.encodeURIComponent(tag));
      }
      for (String vendor : vendors) {
        if (url.length() > 0) {
          url.append("&");
        }
        url.append("organization=").append(URLUtil.encodeURIComponent(vendor));
      }
      if (searchQuery != null) {
        if (url.length() > 0) {
          url.append("&");
        }
        url.append("search=").append(URLUtil.encodeURIComponent(searchQuery));
      }
      return url.toString();
    }
  }

  /** Parser for Installed-plugins searches: value-less state flags plus tag/organization. */
  public static class Installed extends SearchQueryParser {
    public final Set<String> vendors = new HashSet<>();
    public final Set<String> tags = new HashSet<>();
    public boolean enabled;
    public boolean disabled;
    public boolean bundled;
    public boolean downloaded;
    public boolean invalid;
    public boolean needUpdate;
    // True when the query contained at least one state flag (set at end of parse()).
    public boolean attributes;

    public Installed(@NotNull String query) {
      parse(query);
    }

    private void parse(@NotNull String query) {
      List<String> words = splitQuery(query);
      int size = words.size();
      if (size == 0) {
        return;
      }
      int index = 0;
      while (index < size) {
        String name = words.get(index++);
        if (name.startsWith("/")) {
          // Only organization/tag take a value; the state flags are value-less.
          if (name.equals(SearchWords.ORGANIZATION.getValue()) || name.equals(SearchWords.TAG.getValue())) {
            if (index < size) {
              handleAttribute(name, words.get(index++));
            }
            else {
              // Attribute name without a value: fall back to the raw query.
              addToSearchQuery(query);
              break;
            }
          }
          else {
            handleAttribute(name, "");
          }
        }
        else {
          addToSearchQuery(name);
        }
      }
      attributes = enabled || disabled || bundled || downloaded || invalid || needUpdate;
    }

    protected void handleAttribute(@NotNull String name, @NotNull String value) {
      if ("/enabled".equals(name)) {
        enabled = true;
      }
      else if ("/disabled".equals(name)) {
        disabled = true;
      }
      else if ("/bundled".equals(name)) {
        bundled = true;
      }
      else if ("/downloaded".equals(name)) {
        downloaded = true;
      }
      else if ("/invalid".equals(name)) {
        invalid = true;
      }
      else if ("/outdated".equals(name)) {
        needUpdate = true;
      }
      else if (SearchWords.ORGANIZATION.getValue().equals(name)) {
        vendors.add(value);
      }
      else if (SearchWords.TAG.getValue().equals(name)) {
        tags.add(value);
      }
    }
  }
}
| |
package com.mapswithme.maps.widget.placepage;
import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.location.Location;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.EditorInfo;
import android.webkit.WebView;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.PopupMenu;
import android.widget.RatingBar;
import android.widget.RelativeLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import com.mapswithme.maps.BuildConfig;
import com.mapswithme.maps.Framework;
import com.mapswithme.maps.MwmActivity;
import com.mapswithme.maps.MwmApplication;
import com.mapswithme.maps.R;
import com.mapswithme.maps.api.ParsedMwmRequest;
import com.mapswithme.maps.bookmarks.ChooseBookmarkCategoryFragment;
import com.mapswithme.maps.bookmarks.data.Bookmark;
import com.mapswithme.maps.bookmarks.data.BookmarkManager;
import com.mapswithme.maps.bookmarks.data.DistanceAndAzimut;
import com.mapswithme.maps.bookmarks.data.Icon;
import com.mapswithme.maps.bookmarks.data.MapObject;
import com.mapswithme.maps.bookmarks.data.MapObject.MapObjectType;
import com.mapswithme.maps.bookmarks.data.MapObject.Poi;
import com.mapswithme.maps.bookmarks.data.Metadata;
import com.mapswithme.maps.location.LocationHelper;
import com.mapswithme.maps.routing.RoutingController;
import com.mapswithme.maps.widget.ArrowView;
import com.mapswithme.maps.widget.BaseShadowController;
import com.mapswithme.maps.widget.ObservableScrollView;
import com.mapswithme.maps.widget.ScrollViewShadowController;
import com.mapswithme.util.Graphics;
import com.mapswithme.util.InputUtils;
import com.mapswithme.util.LocationUtils;
import com.mapswithme.util.StringUtils;
import com.mapswithme.util.UiUtils;
import com.mapswithme.util.Utils;
import com.mapswithme.util.concurrency.UiThread;
import com.mapswithme.util.sharing.ShareOption;
import com.mapswithme.util.statistics.AlohaHelper;
import com.mapswithme.util.statistics.Statistics;
public class PlacePageView extends RelativeLayout implements View.OnClickListener, View.OnLongClickListener
{
  // SharedPreferences key: show lat/lon in degrees-minutes-seconds instead of decimal.
  private static final String PREF_USE_DMS = "use_dms";

  // Layout mode flags, read from the PlacePageView styleable in init().
  private boolean mIsDocked;
  private boolean mIsFloating;
  // Preview
  private TextView mTvTitle;
  private Toolbar mToolbar;
  private TextView mTvSubtitle;
  private TextView mTvOpened;
  private ArrowView mAvDirection;
  private TextView mTvDistance;
  private RatingBar mRbStars;
  private TextView mTvElevation;
  // Place page details
  private ScrollView mPpDetails;
  private RelativeLayout mAddress;
  private TextView mTvAddress;
  private LinearLayout mPhone;
  private TextView mTvPhone;
  private LinearLayout mWebsite;
  private TextView mTvWebsite;
  private LinearLayout mLatlon;
  private TextView mTvLatlon;
  private LinearLayout mSchedule;
  private TextView mTvSchedule;
  private LinearLayout mWifi;
  private LinearLayout mEmail;
  private TextView mTvEmail;
  private LinearLayout mOperator;
  private TextView mTvOperator;
  private LinearLayout mCuisine;
  private TextView mTvCuisine;
  private LinearLayout mWiki;
  private TextView mTvWiki;
  private LinearLayout mEntrance;
  private TextView mTvEntrance;
  // Bookmark
  private ImageView mIvColor;
  private EditText mEtBookmarkName;
  private TextView mTvNotes;
  private WebView mWvDescription;
  private TextView mTvDescription;
  private Button mBtnEditHtmlDescription;
  private TextView mTvBookmarkGroup;
  // Place page buttons
  private View mGeneralButtonsFrame;
  private View mRouteButtonsFrame;
  private View mApiBack;
  private ImageView mIvBookmark;
  private View mRoutingButton;
  // Animations
  private BaseShadowController mShadowController;
  private BasePlacePageAnimationController mAnimationController;
  private MwmActivity.LeftAnimationTrackListener mLeftAnimationTrackListener;
  // Data
  private MapObject mMapObject;
  private MapObject mBookmarkedMapObject;
  // Current lat/lon display format (see PREF_USE_DMS).
  private boolean mIsLatLonDms;

  /** Visibility/expansion states managed by {@link BasePlacePageAnimationController}. */
  public enum State
  {
    HIDDEN,
    PREVIEW,
    BOOKMARK,
    DETAILS
  }
  /** Programmatic construction; delegates to the full constructor. */
  public PlacePageView(Context context)
  {
    this(context, null, 0);
  }
  /** XML-inflation constructor; delegates to the full constructor. */
  public PlacePageView(Context context, AttributeSet attrs)
  {
    this(context, attrs, 0);
  }
public PlacePageView(Context context, AttributeSet attrs, int defStyleAttr)
{
super(context, attrs);
mIsLatLonDms = MwmApplication.prefs().getBoolean(PREF_USE_DMS, false);
init(attrs, defStyleAttr);
}
  /**
   * Inflates the place-page layout and caches references to every child view,
   * wiring click/long-click listeners. Must run before any refresh* method.
   */
  private void initViews()
  {
    LayoutInflater.from(getContext()).inflate(R.layout.place_page, this);

    // --- Preview section ---
    ViewGroup ppPreview = (ViewGroup) findViewById(R.id.pp__preview);
    mTvTitle = (TextView) ppPreview.findViewById(R.id.tv__title);
    mToolbar = (Toolbar) findViewById(R.id.toolbar);
    mTvSubtitle = (TextView) ppPreview.findViewById(R.id.tv__subtitle);
    mTvOpened = (TextView) ppPreview.findViewById(R.id.tv__opened_till);
    mTvDistance = (TextView) ppPreview.findViewById(R.id.tv__straight_distance);
    mAvDirection = (ArrowView) ppPreview.findViewById(R.id.av__direction);
    mAvDirection.setOnClickListener(this);
    mAvDirection.setImageResource(R.drawable.direction);
    mRbStars = (RatingBar) ppPreview.findViewById(R.id.rb__stars);
    mTvElevation = (TextView) ppPreview.findViewById(R.id.tv__peak_elevation);

    // --- Details section ---
    mPpDetails = (ScrollView) findViewById(R.id.pp__details);
    mAddress = (RelativeLayout) mPpDetails.findViewById(R.id.ll__place_name);
    mTvAddress = (TextView) mPpDetails.findViewById(R.id.tv__place_address);
    mPhone = (LinearLayout) mPpDetails.findViewById(R.id.ll__place_phone);
    mPhone.setOnClickListener(this);
    mTvPhone = (TextView) mPpDetails.findViewById(R.id.tv__place_phone);
    mWebsite = (LinearLayout) mPpDetails.findViewById(R.id.ll__place_website);
    mWebsite.setOnClickListener(this);
    mTvWebsite = (TextView) mPpDetails.findViewById(R.id.tv__place_website);
    mLatlon = (LinearLayout) mPpDetails.findViewById(R.id.ll__place_latlon);
    mLatlon.setOnClickListener(this);
    mTvLatlon = (TextView) mPpDetails.findViewById(R.id.tv__place_latlon);
    mSchedule = (LinearLayout) mPpDetails.findViewById(R.id.ll__place_schedule);
    mTvSchedule = (TextView) mPpDetails.findViewById(R.id.tv__place_schedule);
    mWifi = (LinearLayout) mPpDetails.findViewById(R.id.ll__place_wifi);
    mIvColor = (ImageView) mPpDetails.findViewById(R.id.iv__bookmark_color);
    mIvColor.setOnClickListener(this);
    mEmail = (LinearLayout) mPpDetails.findViewById(R.id.ll__place_email);
    mEmail.setOnClickListener(this);
    mTvEmail = (TextView) mEmail.findViewById(R.id.tv__place_email);
    mOperator = (LinearLayout) mPpDetails.findViewById(R.id.ll__place_operator);
    mOperator.setOnClickListener(this);
    mTvOperator = (TextView) mOperator.findViewById(R.id.tv__place_operator);
    mCuisine = (LinearLayout) mPpDetails.findViewById(R.id.ll__place_cuisine);
    mTvCuisine = (TextView) mCuisine.findViewById(R.id.tv__place_cuisine);
    mWiki = (LinearLayout) mPpDetails.findViewById(R.id.ll__place_wiki);
    mWiki.setOnClickListener(this);
    mEntrance = (LinearLayout) mPpDetails.findViewById(R.id.ll__place_entrance);
    mTvEntrance = (TextView) mEntrance.findViewById(R.id.tv__place_entrance);

    // Long click copies the row content (handled in onLongClick).
    mLatlon.setOnLongClickListener(this);
    mAddress.setOnLongClickListener(this);
    mPhone.setOnLongClickListener(this);
    mWebsite.setOnLongClickListener(this);
    mSchedule.setOnLongClickListener(this);
    mEmail.setOnLongClickListener(this);
    mOperator.setOnLongClickListener(this);
    mWiki.setOnLongClickListener(this);

    // --- Bookmark editing ---
    mEtBookmarkName = (EditText) mPpDetails.findViewById(R.id.et__bookmark_name);
    mEtBookmarkName.setOnEditorActionListener(new TextView.OnEditorActionListener()
    {
      @Override
      public boolean onEditorAction(TextView v, int actionId, KeyEvent event)
      {
        // Persist the edited bookmark name when the user hits "Done" on the IME.
        if (actionId == EditorInfo.IME_ACTION_DONE)
        {
          saveBookmarkNameIfUpdated();
          refreshPreview();
        }
        return false;
      }
    });
    mTvNotes = (TextView) mPpDetails.findViewById(R.id.tv__bookmark_notes);
    mTvNotes.setOnClickListener(this);
    mTvBookmarkGroup = (TextView) mPpDetails.findViewById(R.id.tv__bookmark_group);
    mTvBookmarkGroup.setOnClickListener(this);
    mWvDescription = (WebView) mPpDetails.findViewById(R.id.wv__description);
    mTvDescription = (TextView) mPpDetails.findViewById(R.id.tv__description);
    mTvDescription.setOnClickListener(this);
    mBtnEditHtmlDescription = (Button) mPpDetails.findViewById(R.id.btn__edit_html_bookmark);
    mBtnEditHtmlDescription.setOnClickListener(this);

    // --- Button bars (general vs. routing mode, toggled in refreshButtons) ---
    ViewGroup ppButtons = (ViewGroup) findViewById(R.id.pp__buttons);
    mGeneralButtonsFrame = ppButtons.findViewById(R.id.general);
    mApiBack = mGeneralButtonsFrame.findViewById(R.id.ll__api_back);
    mApiBack.setOnClickListener(this);
    final View bookmarkGroup = mGeneralButtonsFrame.findViewById(R.id.ll__bookmark);
    bookmarkGroup.setOnClickListener(this);
    mIvBookmark = (ImageView) bookmarkGroup.findViewById(R.id.iv__bookmark);
    mGeneralButtonsFrame.findViewById(R.id.ll__share).setOnClickListener(this);
    mRoutingButton = mGeneralButtonsFrame.findViewById(R.id.ll__route);
    mRouteButtonsFrame = ppButtons.findViewById(R.id.routing);
    mRouteButtonsFrame.findViewById(R.id.from).setOnClickListener(this);
    mRouteButtonsFrame.findViewById(R.id.to).setOnClickListener(this);

    mShadowController = new ScrollViewShadowController((ObservableScrollView) mPpDetails)
        .addBottomShadow()
        .attach();

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
      setElevation(UiUtils.dimen(R.dimen.placepage_elevation));

    if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE)
      mPpDetails.setBackgroundResource(0);
  }
private void init(AttributeSet attrs, int defStyleAttr)
{
initViews();
if (isInEditMode())
return;
final TypedArray attrArray = getContext().obtainStyledAttributes(attrs, R.styleable.PlacePageView, defStyleAttr, 0);
final int animationType = attrArray.getInt(R.styleable.PlacePageView_animationType, 0);
mIsDocked = attrArray.getBoolean(R.styleable.PlacePageView_docked, false);
mIsFloating = attrArray.getBoolean(R.styleable.PlacePageView_floating, false);
attrArray.recycle();
// switch with values from "animationType" from attrs.xml
switch (animationType)
{
case 0:
mAnimationController = new PlacePageBottomAnimationController(this);
break;
case 1:
mAnimationController = new PlacePageLeftAnimationController(this);
break;
}
mAnimationController.initialHide();
}
public void restore()
{
if (mMapObject != null)
subscribeBookmarkEditFragment(null);
}
  /** Delegates all touch handling to the animation controller (drag/expand gestures). */
  @Override
  public boolean onTouchEvent(@NonNull MotionEvent event)
  {
    return mAnimationController.onTouchEvent(event);
  }
  /** Lets the animation controller decide whether to steal touches from children. */
  @Override
  public boolean onInterceptTouchEvent(MotionEvent event)
  {
    return mAnimationController.onInterceptTouchEvent(event);
  }
  /** @return whether this place page is in docked layout mode (set via XML attrs). */
  public boolean isDocked()
  {
    return mIsDocked;
  }
  /** @return whether this place page is in floating layout mode (set via XML attrs). */
  public boolean isFloating()
  {
    return mIsFloating;
  }
  /** @return the current visibility state as tracked by the animation controller. */
  public State getState()
  {
    return mAnimationController.getState();
  }
public void setState(State state)
{
InputUtils.hideKeyboard(mEtBookmarkName);
mPpDetails.scrollTo(0, 0);
if (mMapObject != null)
mAnimationController.setState(state, mMapObject.getType());
}
  /**
   * Returns the currently displayed map object, first flushing any pending
   * bookmark-name edit so callers see up-to-date data.
   */
  public MapObject getMapObject()
  {
    saveBookmarkNameIfUpdated();
    return mMapObject;
  }
  /**
   * Displays {@code mapObject} (may be {@code null} to clear). No-op if the
   * same object is already shown. A pending bookmark-name edit is saved first,
   * unless the new object is itself a bookmark (its name would overwrite the edit).
   */
  public void setMapObject(MapObject mapObject)
  {
    if (hasMapObject(mapObject))
      return;
    if (!(mapObject instanceof Bookmark))
      saveBookmarkNameIfUpdated();
    mMapObject = mapObject;
    refreshViews();
  }
public boolean hasMapObject(MapObject mo)
{
if (mo == null && mMapObject == null)
return true;
else if (mMapObject != null)
return mMapObject.sameAs(mo);
return false;
}
  /**
   * Re-renders the whole place page for the current map object: preview,
   * details, distance, bookmark section, and the button bar — then refreshes
   * the scroll shadows on the next UI-thread pass.
   */
  public void refreshViews()
  {
    if (mMapObject == null)
      return;

    mMapObject.setDefaultIfEmpty();
    refreshPreview();
    refreshDetails();
    final Location loc = LocationHelper.INSTANCE.getLastLocation();

    // Each object type shows a different combination of bookmark details and buttons.
    switch (mMapObject.getType())
    {
    case BOOKMARK:
      refreshDistanceToObject(loc);
      showBookmarkDetails();
      refreshButtons(false, true);
      break;
    case POI:
    case ADDITIONAL_LAYER:
      refreshDistanceToObject(loc);
      hideBookmarkDetails();
      refreshButtons(false, true);
      break;
    case API_POINT:
      refreshDistanceToObject(loc);
      hideBookmarkDetails();
      refreshButtons(true, true);
      break;
    case MY_POSITION:
      refreshMyPosition(loc);
      hideBookmarkDetails();
      refreshButtons(false, false);
      break;
    }

    // Defer shadow update until after this layout pass has completed.
    UiThread.runLater(new Runnable()
    {
      @Override
      public void run()
      {
        mShadowController.updateShadows();
        requestLayout();
      }
    });
  }
  /** Updates the collapsed preview strip: title, toolbar title and subtitle. */
  private void refreshPreview()
  {
    mTvTitle.setText(mMapObject.getName());
    if (mToolbar != null)
      mToolbar.setTitle(mMapObject.getName());
    String subtitle = mMapObject.getPoiTypeName();
    // Append translated cuisine to the POI type, e.g. "Restaurant, Italian".
    final String cuisine = mMapObject.getMetadata(Metadata.MetadataType.FMD_CUISINE);
    if (cuisine != null)
      subtitle += ", " + translateCuisine(cuisine);
    mTvSubtitle.setText(subtitle);
    mAvDirection.setVisibility(View.GONE);
    // TODO show/hide mTvOpened after schedule fill be parsed
  }
public String translateCuisine(String cuisine)
{
if (TextUtils.isEmpty(cuisine))
return cuisine;
// cuisines translations can contain unsupported symbols, and res ids
// replace them with supported "_"( so ', ' and ' ' are replaced with underlines)
final String[] cuisines = cuisine.split(";");
String result = "";
// search translations for each cuisine
for (String cuisineRaw : cuisines)
{
final String cuisineKey = cuisineRaw.replace(", ", "_").replace(' ', '_').toLowerCase();
int resId = getResources().getIdentifier("cuisine_" + cuisineKey, "string", BuildConfig.APPLICATION_ID);
result += resId == 0 ? cuisineRaw : getResources().getString(resId);
}
return result;
}
  /**
   * Populates every row of the details section from the object's metadata,
   * hiding rows whose metadata is absent.
   */
  private void refreshDetails()
  {
    refreshLatLon();
    // Prefer FMD_WEBSITE; fall back to FMD_URL when the website field is empty.
    final String website = mMapObject.getMetadata(Metadata.MetadataType.FMD_WEBSITE);
    refreshMetadataOrHide(TextUtils.isEmpty(website) ? mMapObject.getMetadata(Metadata.MetadataType.FMD_URL) : website, mWebsite, mTvWebsite);
    refreshMetadataOrHide(mMapObject.getMetadata(Metadata.MetadataType.FMD_PHONE_NUMBER), mPhone, mTvPhone);
    refreshMetadataOrHide(mMapObject.getMetadata(Metadata.MetadataType.FMD_EMAIL), mEmail, mTvEmail);
    refreshMetadataOrHide(mMapObject.getMetadata(Metadata.MetadataType.FMD_OPERATOR), mOperator, mTvOperator);
    refreshMetadataOrHide(translateCuisine(mMapObject.getMetadata(Metadata.MetadataType.FMD_CUISINE)), mCuisine, mTvCuisine);
    // TODO @yunikkk uncomment wiki display when data with correct wiki representation(urlencoded once) will be ready
    // refreshMetadataOrHide(mMapObject.getMetadata(Metadata.MetadataType.FMD_WIKIPEDIA), mWiki, null);
    refreshMetadataOrHide(mMapObject.getMetadata(Metadata.MetadataType.FMD_INTERNET), mWifi, null);
    refreshMetadataOrHide(mMapObject.getMetadata(Metadata.MetadataType.FMD_FLATS), mEntrance, mTvEntrance);
    // TODO throw away parsing hack when data will be parsed correctly in core
    final String rawSchedule = mMapObject.getMetadata(Metadata.MetadataType.FMD_OPEN_HOURS);
    refreshMetadataOrHide(TextUtils.isEmpty(rawSchedule) ? null : rawSchedule.replace("; ", "\n").replace(';', '\n'), mSchedule, mTvSchedule);
    refreshMetadataStars(mMapObject.getMetadata(Metadata.MetadataType.FMD_STARS));
    UiUtils.setTextAndHideIfEmpty(mTvElevation, mMapObject.getMetadata(Metadata.MetadataType.FMD_ELE));
  }
  /** Shows the "not bookmarked" icon for non-bookmark objects. */
  private void hideBookmarkDetails()
  {
    mIvBookmark.setImageDrawable(Graphics.tint(getContext(), R.drawable.ic_bookmarks_off, R.attr.iconTint));
  }
  /**
   * Fills the bookmark-specific widgets (name, group, color, description) for
   * the current object, which must be a {@link Bookmark}. HTML descriptions
   * render in a WebView with an edit button; plain text uses a TextView.
   */
  private void showBookmarkDetails()
  {
    final Bookmark bookmark = (Bookmark) mMapObject;
    mEtBookmarkName.setText(bookmark.getName());
    mTvBookmarkGroup.setText(bookmark.getCategoryName(getContext()));
    mIvColor.setImageResource(bookmark.getIcon().getSelectedResId());
    mIvBookmark.setImageResource(R.drawable.ic_bookmarks_on);
    final String notes = bookmark.getBookmarkDescription();
    if (notes.isEmpty())
      UiUtils.hide(mWvDescription, mBtnEditHtmlDescription, mTvDescription);
    else if (StringUtils.isHtml(notes))
    {
      mWvDescription.loadData(notes, "text/html; charset=utf-8", null);
      UiUtils.show(mWvDescription, mBtnEditHtmlDescription);
      UiUtils.hide(mTvDescription);
    }
    else
    {
      UiUtils.hide(mWvDescription, mBtnEditHtmlDescription);
      UiUtils.setTextAndShow(mTvDescription, notes);
    }
  }
private void refreshButtons(boolean showBackButton, boolean showRoutingButton)
{
if (RoutingController.get().isPlanning())
{
UiUtils.show(mRouteButtonsFrame);
UiUtils.hide(mGeneralButtonsFrame);
}
else
{
UiUtils.show(mGeneralButtonsFrame);
UiUtils.hide(mRouteButtonsFrame);
UiUtils.showIf(showBackButton || ParsedMwmRequest.isPickPointMode(), mApiBack);
UiUtils.showIf(showRoutingButton, mRoutingButton);
}
}
public void refreshLocation(Location l)
{
if (mMapObject == null)
return;
if (mMapObject.getType() == MapObjectType.MY_POSITION)
refreshMyPosition(l);
else
refreshDistanceToObject(l);
}
  /**
   * For the "my position" object: hides the distance row, shows altitude and
   * speed in the subtitle, and syncs the object's coordinates with {@code l}.
   */
  private void refreshMyPosition(Location l)
  {
    mTvDistance.setVisibility(View.GONE);
    if (l == null)
      return;
    final StringBuilder builder = new StringBuilder();
    if (l.hasAltitude())
      builder.append(Framework.nativeFormatAltitude(l.getAltitude()));
    if (l.hasSpeed())
      builder.append(" ")
             .append(Framework.nativeFormatSpeed(l.getSpeed()));
    mTvSubtitle.setText(builder.toString());
    // Keep the displayed object at the actual GPS position.
    mMapObject.setLat(l.getLatitude());
    mMapObject.setLon(l.getLongitude());
    refreshLatLon();
  }
private void refreshDistanceToObject(Location l)
{
if (l != null)
{
mTvDistance.setVisibility(View.VISIBLE);
final DistanceAndAzimut distanceAndAzimuth = Framework.nativeGetDistanceAndAzimutFromLatLon(
mMapObject.getLat(), mMapObject.getLon(),
l.getLatitude(), l.getLongitude(), 0.0);
mTvDistance.setText(distanceAndAzimuth.getDistance());
}
else
mTvDistance.setVisibility(View.GONE);
}
  /**
   * Renders the object's coordinates in the current format (decimal or DMS,
   * per mIsLatLonDms). The native formatter returns {lat, lon} as two strings.
   */
  private void refreshLatLon()
  {
    final double lat = mMapObject.getLat();
    final double lon = mMapObject.getLon();
    final String[] latLon = Framework.nativeFormatLatLonToArr(lat, lon, mIsLatLonDms);
    if (latLon.length == 2)
      mTvLatlon.setText(latLon[0] + ", " + latLon[1]);
  }
private static void refreshMetadataOrHide(String metadata, LinearLayout metaLayout, TextView metaTv)
{
if (!TextUtils.isEmpty(metadata))
{
metaLayout.setVisibility(View.VISIBLE);
if (metaTv != null)
metaTv.setText(metadata);
}
else
metaLayout.setVisibility(View.GONE);
}
private void refreshMetadataStars(String stars)
{
if (TextUtils.isEmpty(stars))
{
mRbStars.setVisibility(View.GONE);
return;
}
try
{
mRbStars.setRating(Float.parseFloat(stars));
mRbStars.setVisibility(View.VISIBLE);
} catch (NumberFormatException e)
{
mRbStars.setVisibility(View.GONE);
}
}
  /**
   * Rotates the direction arrow toward the displayed object using the
   * compass-north azimuth. No-op when hidden, when nothing is shown, when the
   * object is "my position", or when no fix is available.
   */
  public void refreshAzimuth(double northAzimuth)
  {
    if (getState() == State.HIDDEN || mMapObject == null || mMapObject.getType() == MapObjectType.MY_POSITION)
      return;
    final Location location = LocationHelper.INSTANCE.getLastLocation();
    if (location == null)
      return;
    final double azimuth = Framework.nativeGetDistanceAndAzimutFromLatLon(mMapObject.getLat(), mMapObject.getLon(),
                                                                          location.getLatitude(), location.getLongitude(),
                                                                          northAzimuth)
                                    .getAzimuth();
    // A negative azimuth from the native side means "not computable" — keep the arrow as-is.
    if (azimuth >= 0)
    {
      UiUtils.show(mAvDirection);
      mAvDirection.setAzimuth(azimuth);
    }
  }
  /** Forwards the visibility-change listener to the animation controller. */
  public void setOnVisibilityChangedListener(BasePlacePageAnimationController.OnVisibilityChangedListener listener)
  {
    mAnimationController.setOnVisibilityChangedListener(listener);
  }
  /**
   * Called by the host on resume: re-validates the displayed object, since a
   * bookmark may have been deleted or an API request cancelled while paused.
   */
  public void onResume()
  {
    if (mMapObject == null)
      return;
    checkBookmarkWasDeleted();
    checkApiWasCanceled();
  }
  // TODO remove that method completely. host activity should check that itself
  /** Clears an API-point object whose originating request no longer exists. */
  private void checkApiWasCanceled()
  {
    if ((mMapObject.getType() == MapObjectType.API_POINT) && !ParsedMwmRequest.hasRequest())
      setMapObject(null);
  }
  // TODO refactor processing of bookmarks.
  /**
   * Validates a displayed bookmark against the BookmarkManager: if it was
   * deleted while we were paused, downgrade it to a plain POI at the same
   * coordinates; otherwise re-read it to pick up edits made elsewhere.
   */
  private void checkBookmarkWasDeleted()
  {
    // We need to check, if content of body is still valid
    if (mMapObject.getType() == MapObjectType.BOOKMARK)
    {
      final Bookmark bmk = (Bookmark) mMapObject;
      boolean deleted = false;

      // Out-of-range category/bookmark ids, or a lat mismatch, all indicate deletion.
      if (BookmarkManager.INSTANCE.getCategoriesCount() <= bmk.getCategoryId())
        deleted = true;
      else if (BookmarkManager.INSTANCE.getCategoryById(bmk.getCategoryId()).getBookmarksCount() <= bmk.getBookmarkId())
        deleted = true;
      else if (BookmarkManager.INSTANCE.getBookmark(bmk.getCategoryId(), bmk.getBookmarkId()).getLat() != bmk.getLat())
        deleted = true;
      // We can do check above, because lat/lon cannot be changed from edit screen.

      if (deleted)
      {
        // Make Poi from bookmark
        final MapObject p = new Poi(mMapObject.getName(), mMapObject.getLat(), mMapObject.getLon(), null);
        setMapObject(p);
        // TODO how to handle the case, when bookmark was moved to another group?
      }
      else
      {
        // Update data for current bookmark
        // setMapObject(null) first forces a full refresh (setMapObject is a
        // no-op when hasMapObject() considers the objects equal).
        final Bookmark updatedBmk = BookmarkManager.INSTANCE.getBookmark(bmk.getCategoryId(), bmk.getBookmarkId());
        setMapObject(null);
        setMapObject(updatedBmk);
      }
    }
  }
private void saveBookmarkNameIfUpdated()
{
// Can't save bookmark name if current object is not bookmark.
if (mMapObject == null || !(mMapObject instanceof Bookmark))
return;
final Bookmark bookmark = (Bookmark) mMapObject;
final String name = mEtBookmarkName.getText().toString();
bookmark.setParams(name, null, bookmark.getBookmarkDescription());
}
  /**
   * Adds listener to {@link EditDescriptionFragment} to catch notification about bookmark description edit is complete.
   * <br/>When the user rotates device screen the listener is lost, so we must re-subscribe again.
   * @param fragment if specified - explicitly subscribe to this fragment. Otherwise try to find the fragment by tag.
   */
  private void subscribeBookmarkEditFragment(@Nullable EditDescriptionFragment fragment)
  {
    if (fragment == null)
    {
      // Look up a fragment that may have survived a configuration change.
      FragmentManager fm = ((FragmentActivity)getContext()).getSupportFragmentManager();
      fragment = (EditDescriptionFragment)fm.findFragmentByTag(EditDescriptionFragment.class.getName());
    }
    if (fragment == null)
      return;
    fragment.setSaveDescriptionListener(new EditDescriptionFragment.OnDescriptionSavedListener()
    {
      @Override
      public void onSaved(Bookmark bookmark)
      {
        // Re-read the bookmark so the page shows the freshly saved description.
        final Bookmark updatedBookmark = BookmarkManager.INSTANCE.getBookmark(bookmark.getCategoryId(), bookmark.getBookmarkId());
        setMapObject(updatedBookmark);
        Statistics.INSTANCE.trackEvent(Statistics.EventName.BMK_DESCRIPTION_CHANGED);
      }
    });
  }
// Single-click dispatcher for every tappable row/button on the place page.
@Override
public void onClick(View v)
{
  switch (v.getId())
  {
  case R.id.iv__bookmark_color:
    // Persist any pending name edit before opening the color chooser.
    saveBookmarkNameIfUpdated();
    selectBookmarkColor();
    break;
  case R.id.ll__bookmark:
    Statistics.INSTANCE.trackEvent(Statistics.EventName.PP_BOOKMARK);
    AlohaHelper.logClick(AlohaHelper.PP_BOOKMARK);
    toggleIsBookmark();
    break;
  case R.id.ll__share:
    Statistics.INSTANCE.trackEvent(Statistics.EventName.PP_SHARE);
    AlohaHelper.logClick(AlohaHelper.PP_SHARE);
    ShareOption.ANY.shareMapObject((Activity) getContext(), mMapObject);
    break;
  case R.id.ll__api_back:
    // Return the picked point to the requesting third-party app, if a request is active;
    // otherwise just close the activity.
    final Activity activity = (Activity) getContext();
    if (ParsedMwmRequest.hasRequest())
    {
      final ParsedMwmRequest request = ParsedMwmRequest.getCurrentRequest();
      if (ParsedMwmRequest.isPickPointMode())
        request.setPointData(mMapObject.getLat(), mMapObject.getLon(), mMapObject.getName(), "");
      request.sendResponseAndFinish(activity, true);
    }
    else
      activity.finish();
    break;
  case R.id.ll__place_latlon:
    // Toggle between decimal and DMS coordinate formats and persist the choice.
    mIsLatLonDms = !mIsLatLonDms;
    MwmApplication.prefs().edit().putBoolean(PREF_USE_DMS, mIsLatLonDms).commit();
    refreshLatLon();
    break;
  case R.id.ll__place_phone:
    // Open the dialer pre-filled with this place's phone number.
    Intent intent = new Intent(Intent.ACTION_DIAL);
    intent.setData(Uri.parse("tel:" + mTvPhone.getText()));
    try
    {
      getContext().startActivity(intent);
    } catch (ActivityNotFoundException e)
    {
      // No dialer app available (e.g. tablets) - just log it.
      AlohaHelper.logException(e);
    }
    break;
  case R.id.ll__place_website:
    followUrl(mTvWebsite.getText().toString());
    break;
  case R.id.ll__place_wiki:
    // TODO: Refactor and use separate getters for Wiki and all other PP meta info too.
    followUrl(mMapObject.getMetadata(Metadata.MetadataType.FMD_WIKIPEDIA));
    break;
  case R.id.tv__bookmark_group:
    saveBookmarkNameIfUpdated();
    selectBookmarkSet();
    break;
  case R.id.av__direction:
    Statistics.INSTANCE.trackEvent(Statistics.EventName.PP_DIRECTION_ARROW);
    AlohaHelper.logClick(AlohaHelper.PP_DIRECTION_ARROW);
    showBigDirection();
    break;
  case R.id.ll__place_email:
    // Reuses the 'intent' local declared in the phone case above.
    intent = new Intent(Intent.ACTION_SENDTO);
    intent.setData(Utils.buildMailUri(mTvEmail.getText().toString(), "", ""));
    getContext().startActivity(intent);
    break;
  case R.id.tv__bookmark_notes:
  case R.id.tv__description:
  case R.id.btn__edit_html_bookmark:
    // All three views open the description editor for the current bookmark.
    saveBookmarkNameIfUpdated();
    final Bundle args = new Bundle();
    args.putParcelable(EditDescriptionFragment.EXTRA_BOOKMARK, mMapObject);
    String name = EditDescriptionFragment.class.getName();
    final EditDescriptionFragment fragment = (EditDescriptionFragment) Fragment.instantiate(getContext(), name, args);
    fragment.setArguments(args);
    fragment.show(((FragmentActivity) getContext()).getSupportFragmentManager(), name);
    subscribeBookmarkEditFragment(fragment);
    break;
  case R.id.from:
    // Use this place as the routing start point; hide the page if accepted.
    if (RoutingController.get().setStartPoint(mMapObject))
      hide();
    break;
  case R.id.to:
    if (RoutingController.get().setEndPoint(mMapObject))
      hide();
    break;
  }
}
/**
 * Opens the given URL in an external viewer, prepending "http://" when the
 * URL carries no scheme so the VIEW intent can resolve.
 */
private void followUrl(String url)
{
  final String target = (url.startsWith("http://") || url.startsWith("https://"))
                        ? url
                        : "http://" + url;
  final Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(target));
  getContext().startActivity(intent);
}
// Toggles bookmark state of the current map object: deletes the bookmark if it is one,
// otherwise creates a bookmark for it.
private void toggleIsBookmark()
{
  if (mMapObject == null)
    return;
  if (mMapObject.getType() == MapObjectType.BOOKMARK)
  {
    // Currently a bookmark: swap the shown object back to a plain POI first,
    // then delete the bookmark record.
    final Bookmark currentBookmark = (Bookmark) mMapObject;
    MapObject p;
    if (mBookmarkedMapObject != null && LocationUtils.areLatLonEqual(mMapObject, mBookmarkedMapObject))
      // use cached POI of bookmark, if it corresponds to current object
      p = mBookmarkedMapObject;
    else
      p = Framework.nativeGetMapObjectForPoint(mMapObject.getLat(), mMapObject.getLon());
    setMapObject(p);
    setState(State.DETAILS);
    BookmarkManager.INSTANCE.deleteBookmark(currentBookmark);
  }
  else
  {
    // Not a bookmark yet: cache the POI (for a later toggle back) and create a bookmark.
    mBookmarkedMapObject = mMapObject;
    final Bookmark newBmk = BookmarkManager.INSTANCE.getBookmark(BookmarkManager.INSTANCE.addNewBookmark(
        mMapObject.getName(), mMapObject.getLat(), mMapObject.getLon()));
    setMapObject(newBmk);
    // FIXME this hack is necessary to get correct views height in animation controller. remove after further investigation.
    post(new Runnable()
    {
      @Override
      public void run()
      {
        setState(State.BOOKMARK);
      }
    });
  }
}
/**
 * Shows the "choose bookmark category" dialog for the currently displayed bookmark.
 */
private void selectBookmarkSet()
{
  final Bookmark bookmark = (Bookmark) mMapObject;
  final FragmentActivity activity = (FragmentActivity) getContext();
  // Hand the current category/bookmark ids to the chooser so it can preselect them.
  final Bundle args = new Bundle();
  args.putInt(ChooseBookmarkCategoryFragment.CATEGORY_ID, bookmark.getCategoryId());
  args.putInt(ChooseBookmarkCategoryFragment.BOOKMARK_ID, bookmark.getBookmarkId());
  final ChooseBookmarkCategoryFragment chooser = (ChooseBookmarkCategoryFragment)
      Fragment.instantiate(activity, ChooseBookmarkCategoryFragment.class.getName(), args);
  chooser.show(activity.getSupportFragmentManager(), null);
}
/**
 * Shows the bookmark color chooser and applies the selected icon to the current bookmark.
 */
private void selectBookmarkColor()
{
  final Bundle args = new Bundle();
  args.putString(BookmarkColorDialogFragment.ICON_TYPE, ((Bookmark) mMapObject).getIcon().getType());
  final BookmarkColorDialogFragment dialog = (BookmarkColorDialogFragment)
      BookmarkColorDialogFragment.instantiate(getContext(), BookmarkColorDialogFragment.class.getName(), args);
  dialog.setOnColorSetListener(new BookmarkColorDialogFragment.OnBookmarkColorChangeListener()
  {
    @Override
    public void onBookmarkColorSet(int colorPos)
    {
      // Read mMapObject at callback time - it may have changed since the dialog opened.
      final Bookmark edited = (Bookmark) mMapObject;
      final Icon newIcon = BookmarkManager.getIcons().get(colorPos);
      final String from = edited.getIcon().getName();
      final String to = newIcon.getName();
      if (!TextUtils.equals(from, to))
        Statistics.INSTANCE.trackColorChanged(from, to);
      edited.setParams(edited.getName(), newIcon, edited.getBookmarkDescription());
      // Re-fetch the bookmark so the place page shows the updated icon.
      setMapObject(BookmarkManager.INSTANCE.getBookmark(edited.getCategoryId(), edited.getBookmarkId()));
    }
  });
  dialog.show(((FragmentActivity) getContext()).getSupportFragmentManager(), null);
}
/**
 * Opens the full-screen direction (compass) dialog for the current map object.
 */
private void showBigDirection()
{
  final FragmentActivity activity = (FragmentActivity) getContext();
  final DirectionFragment directionFragment =
      (DirectionFragment) Fragment.instantiate(activity, DirectionFragment.class.getName(), null);
  directionFragment.setMapObject(mMapObject);
  directionFragment.show(activity.getSupportFragmentManager(), null);
}
// Long-click handler: offers a popup menu with "copy to clipboard" entries for the
// metadata row that was pressed.
@Override
public boolean onLongClick(View v)
{
  final Object tag = v.getTag();
  final String tagStr = tag == null ? "" : tag.toString();
  AlohaHelper.logLongClick(tagStr);
  final PopupMenu popup = new PopupMenu(getContext(), v);
  final Menu menu = popup.getMenu();
  // Candidate strings to copy; one menu item is created per entry.
  final List<String> items = new ArrayList<>();
  switch (v.getId())
  {
  case R.id.ll__place_latlon:
    // Offer both coordinate formats (decimal and DMS).
    final double lat = mMapObject.getLat();
    final double lon = mMapObject.getLon();
    items.add(Framework.nativeFormatLatLon(lat, lon, false));
    items.add(Framework.nativeFormatLatLon(lat, lon, true));
    break;
  case R.id.ll__place_website:
    items.add(mTvWebsite.getText().toString());
    break;
  case R.id.ll__place_email:
    items.add(mTvEmail.getText().toString());
    break;
  case R.id.ll__place_phone:
    items.add(mTvPhone.getText().toString());
    break;
  case R.id.ll__place_schedule:
    items.add(mTvSchedule.getText().toString());
    break;
  case R.id.ll__place_operator:
    items.add(mTvOperator.getText().toString());
    break;
  case R.id.ll__place_wiki:
    items.add(mMapObject.getMetadata(Metadata.MetadataType.FMD_WIKIPEDIA));
    break;
  }
  final String copyText = getResources().getString(android.R.string.copy);
  // Menu item id doubles as the index into 'items' (see listener below).
  for (int i = 0; i < items.size(); i++)
    menu.add(Menu.NONE, i, i, String.format("%s %s", copyText, items.get(i)));
  popup.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener()
  {
    @Override
    public boolean onMenuItemClick(MenuItem item)
    {
      final int id = item.getItemId();
      final Context ctx = getContext();
      Utils.copyTextToClipboard(ctx, items.get(id));
      Utils.toastShortcut(ctx, ctx.getString(R.string.copied_to_clipboard, items.get(id)));
      Statistics.INSTANCE.trackEvent(Statistics.EventName.PP_METADATA_COPY + ":" + tagStr);
      AlohaHelper.logClick(AlohaHelper.PP_METADATA_COPY + ":" + tagStr);
      return true;
    }
  });
  popup.show();
  return true;
}
/**
 * Width of the place page when docked. Falls back to the layout-params width
 * while the view has not been measured yet (i.e. getWidth() is still 0).
 */
public int getDockedWidth()
{
  final int measured = getWidth();
  return measured != 0 ? measured : getLayoutParams().width;
}
// Returns the listener notified about left-edge animation tracking, may be null.
public MwmActivity.LeftAnimationTrackListener getLeftAnimationTrackListener()
{
  return mLeftAnimationTrackListener;
}
// Sets the listener notified about left-edge animation tracking.
public void setLeftAnimationTrackListener(MwmActivity.LeftAnimationTrackListener listener)
{
  mLeftAnimationTrackListener = listener;
}
// Hides the place page by switching it to the HIDDEN state.
public void hide()
{
  setState(State.HIDDEN);
}
/**
 * Handles an outside touch: hides the place page unless it is docked or floating.
 * @return true when the touch was consumed (i.e. the page was hidden).
 */
public boolean hideOnTouch()
{
  if (mIsDocked || mIsFloating)
    return false;
  final State state = getState();
  if (state == State.BOOKMARK || state == State.DETAILS)
  {
    hide();
    return true;
  }
  return false;
}
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dns;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.google.api.services.dns.model.Change;
import com.google.common.collect.ImmutableList;
import org.junit.Test;
import java.util.List;
/**
 * Unit tests for {@code ChangeRequestInfo}: builder behavior (both current and
 * deprecated accessor styles), equals/hashCode contracts, protobuf round-trips,
 * and the addition/deletion list mutation helpers.
 */
public class ChangeRequestInfoTest {
  private static final String GENERATED_ID = "cr-id-1";
  private static final Long START_TIME_MILLIS = 12334567890L;
  private static final ChangeRequest.Status STATUS = ChangeRequest.Status.PENDING;
  private static final String NAME1 = "dns1";
  private static final RecordSet.Type TYPE1 = RecordSet.Type.A;
  private static final String NAME2 = "dns2";
  private static final RecordSet.Type TYPE2 = RecordSet.Type.AAAA;
  private static final String NAME3 = "dns3";
  private static final RecordSet.Type TYPE3 = RecordSet.Type.MX;
  private static final RecordSet RECORD1 = RecordSet.newBuilder(NAME1, TYPE1).build();
  private static final RecordSet RECORD2 = RecordSet.newBuilder(NAME2, TYPE2).build();
  private static final RecordSet RECORD3 = RecordSet.newBuilder(NAME3, TYPE3).build();
  private static final List<RecordSet> ADDITIONS = ImmutableList.of(RECORD1, RECORD2);
  private static final List<RecordSet> DELETIONS = ImmutableList.of(RECORD3);
  // Fully-populated fixture built via the current builder entry point.
  private static final ChangeRequestInfo CHANGE = ChangeRequest.newBuilder()
      .add(RECORD1)
      .add(RECORD2)
      .delete(RECORD3)
      .setStartTime(START_TIME_MILLIS)
      .setStatus(STATUS)
      .setGeneratedId(GENERATED_ID)
      .build();
  // Same fixture built via the deprecated builder() entry point.
  private static final ChangeRequestInfo DEPRECATED_CHANGE = ChangeRequest.builder()
      .add(RECORD1)
      .add(RECORD2)
      .delete(RECORD3)
      .setStartTime(START_TIME_MILLIS)
      .setStatus(STATUS)
      .setGeneratedId(GENERATED_ID)
      .build();
  // An empty builder must yield empty (not null) addition/deletion lists.
  @Test
  public void testEmptyBuilder() {
    ChangeRequestInfo cr = ChangeRequest.newBuilder().build();
    assertNotNull(cr.getDeletions());
    assertTrue(cr.getDeletions().isEmpty());
    assertNotNull(cr.getAdditions());
    assertTrue(cr.getAdditions().isEmpty());
  }
  // Same guarantee through the deprecated accessor names.
  @Test
  public void testEmptyBuilderDeprecated() {
    ChangeRequestInfo cr = ChangeRequest.builder().build();
    assertNotNull(cr.deletions());
    assertTrue(cr.deletions().isEmpty());
    assertNotNull(cr.additions());
    assertTrue(cr.additions().isEmpty());
  }
  // Builder setters must be reflected by the getters; setAdditions/setDeletions replace lists.
  @Test
  public void testBuilder() {
    assertEquals(GENERATED_ID, CHANGE.getGeneratedId());
    // NOTE(review): status() is the deprecated-style accessor while the rest of this
    // test uses get* - confirm a getStatus() accessor exists before aligning.
    assertEquals(STATUS, CHANGE.status());
    assertEquals(START_TIME_MILLIS, CHANGE.getStartTimeMillis());
    assertEquals(ADDITIONS, CHANGE.getAdditions());
    assertEquals(DELETIONS, CHANGE.getDeletions());
    List<RecordSet> recordList = ImmutableList.of(RECORD1);
    ChangeRequestInfo another = CHANGE.toBuilder().setAdditions(recordList).build();
    assertEquals(recordList, another.getAdditions());
    assertEquals(CHANGE.getDeletions(), another.getDeletions());
    another = CHANGE.toBuilder().setDeletions(recordList).build();
    assertEquals(recordList, another.getDeletions());
    assertEquals(CHANGE.getAdditions(), another.getAdditions());
  }
  // Mirror of testBuilder using the deprecated accessor/builder method names.
  @Test
  public void testBuilderDeprecated() {
    assertEquals(GENERATED_ID, DEPRECATED_CHANGE.generatedId());
    assertEquals(STATUS, DEPRECATED_CHANGE.status());
    assertEquals(START_TIME_MILLIS, DEPRECATED_CHANGE.startTimeMillis());
    assertEquals(ADDITIONS, DEPRECATED_CHANGE.additions());
    assertEquals(DELETIONS, DEPRECATED_CHANGE.deletions());
    List<RecordSet> recordList = ImmutableList.of(RECORD1);
    ChangeRequestInfo another = DEPRECATED_CHANGE.toBuilder().additions(recordList).build();
    assertEquals(recordList, another.additions());
    assertEquals(DEPRECATED_CHANGE.deletions(), another.deletions());
    another = DEPRECATED_CHANGE.toBuilder().deletions(recordList).build();
    assertEquals(recordList, another.deletions());
    assertEquals(DEPRECATED_CHANGE.additions(), another.additions());
  }
  // equals must hold for rebuilt and pb-round-tripped copies, and break when any field changes.
  @Test
  public void testEqualsAndNotEquals() {
    ChangeRequestInfo clone = CHANGE.toBuilder().build();
    assertEquals(CHANGE, clone);
    clone = ChangeRequest.fromPb(CHANGE.toPb());
    assertEquals(CHANGE, clone);
    clone = CHANGE.toBuilder().setGeneratedId("some-other-id").build();
    assertNotEquals(CHANGE, clone);
    clone = CHANGE.toBuilder().setStartTime(CHANGE.getStartTimeMillis() + 1).build();
    assertNotEquals(CHANGE, clone);
    clone = CHANGE.toBuilder().add(RECORD3).build();
    assertNotEquals(CHANGE, clone);
    clone = CHANGE.toBuilder().delete(RECORD1).build();
    assertNotEquals(CHANGE, clone);
    ChangeRequestInfo empty = ChangeRequest.newBuilder().build();
    assertNotEquals(CHANGE, empty);
    assertEquals(empty, ChangeRequest.newBuilder().build());
  }
  // Equal objects must produce equal hash codes (equals/hashCode contract).
  @Test
  public void testSameHashCodeOnEquals() {
    ChangeRequestInfo clone = CHANGE.toBuilder().build();
    assertEquals(CHANGE, clone);
    assertEquals(CHANGE.hashCode(), clone.hashCode());
    ChangeRequestInfo empty = ChangeRequest.newBuilder().build();
    assertEquals(empty.hashCode(), ChangeRequest.newBuilder().build().hashCode());
  }
  // Every partially-populated value must survive a toPb/fromPb round trip.
  @Test
  public void testToAndFromPb() {
    assertEquals(CHANGE, ChangeRequest.fromPb(CHANGE.toPb()));
    ChangeRequestInfo partial = ChangeRequest.newBuilder().build();
    assertEquals(partial, ChangeRequest.fromPb(partial.toPb()));
    partial = ChangeRequest.newBuilder().setGeneratedId(GENERATED_ID).build();
    assertEquals(partial, ChangeRequest.fromPb(partial.toPb()));
    partial = ChangeRequest.newBuilder().add(RECORD1).build();
    assertEquals(partial, ChangeRequest.fromPb(partial.toPb()));
    partial = ChangeRequest.newBuilder().delete(RECORD1).build();
    assertEquals(partial, ChangeRequest.fromPb(partial.toPb()));
    partial = ChangeRequest.newBuilder().setAdditions(ADDITIONS).build();
    assertEquals(partial, ChangeRequest.fromPb(partial.toPb()));
    partial = ChangeRequest.newBuilder().setDeletions(DELETIONS).build();
    assertEquals(partial, ChangeRequest.fromPb(partial.toPb()));
    partial = ChangeRequest.newBuilder().setStartTime(START_TIME_MILLIS).build();
    assertEquals(partial, ChangeRequest.fromPb(partial.toPb()));
    partial = ChangeRequest.newBuilder().setStatus(STATUS).build();
    assertEquals(partial, ChangeRequest.fromPb(partial.toPb()));
  }
  // Every partially-populated value must survive a toBuilder/build round trip.
  @Test
  public void testToBuilder() {
    assertEquals(CHANGE, CHANGE.toBuilder().build());
    ChangeRequestInfo partial = ChangeRequest.newBuilder().build();
    assertEquals(partial, partial.toBuilder().build());
    partial = ChangeRequest.newBuilder().setGeneratedId(GENERATED_ID).build();
    assertEquals(partial, partial.toBuilder().build());
    partial = ChangeRequest.newBuilder().add(RECORD1).build();
    assertEquals(partial, partial.toBuilder().build());
    partial = ChangeRequest.newBuilder().delete(RECORD1).build();
    assertEquals(partial, partial.toBuilder().build());
    partial = ChangeRequest.newBuilder().setAdditions(ADDITIONS).build();
    assertEquals(partial, partial.toBuilder().build());
    partial = ChangeRequest.newBuilder().setDeletions(DELETIONS).build();
    assertEquals(partial, partial.toBuilder().build());
    partial = ChangeRequest.newBuilder().setStartTime(START_TIME_MILLIS).build();
    assertEquals(partial, partial.toBuilder().build());
    partial = ChangeRequest.newBuilder().setStatus(STATUS).build();
    assertEquals(partial, partial.toBuilder().build());
  }
  // clearAdditions must empty only the additions list.
  @Test
  public void testClearAdditions() {
    ChangeRequestInfo clone = CHANGE.toBuilder().clearAdditions().build();
    assertTrue(clone.getAdditions().isEmpty());
    assertFalse(clone.getDeletions().isEmpty());
  }
  // add() rejects null and appends to the additions list.
  @Test
  public void testAddAddition() {
    try {
      CHANGE.toBuilder().add(null);
      fail("Should not be able to add null RecordSet.");
    } catch (NullPointerException e) {
      // expected
    }
    ChangeRequestInfo clone = CHANGE.toBuilder().add(RECORD1).build();
    assertEquals(CHANGE.getAdditions().size() + 1, clone.getAdditions().size());
  }
  // delete() rejects null and appends to the deletions list.
  @Test
  public void testAddDeletion() {
    try {
      CHANGE.toBuilder().delete(null);
      fail("Should not be able to delete null RecordSet.");
    } catch (NullPointerException e) {
      // expected
    }
    ChangeRequestInfo clone = CHANGE.toBuilder().delete(RECORD1).build();
    assertEquals(CHANGE.getDeletions().size() + 1, clone.getDeletions().size());
  }
  // clearDeletions must empty only the deletions list.
  @Test
  public void testClearDeletions() {
    ChangeRequestInfo clone = CHANGE.toBuilder().clearDeletions().build();
    assertTrue(clone.getDeletions().isEmpty());
    assertFalse(clone.getAdditions().isEmpty());
  }
  // removeAddition removes only the given record from additions.
  @Test
  public void testRemoveAddition() {
    ChangeRequestInfo clone = CHANGE.toBuilder().removeAddition(RECORD1).build();
    assertTrue(clone.getAdditions().contains(RECORD2));
    assertFalse(clone.getAdditions().contains(RECORD1));
    assertTrue(clone.getDeletions().contains(RECORD3));
    clone = CHANGE.toBuilder().removeAddition(RECORD2).removeAddition(RECORD1).build();
    assertFalse(clone.getAdditions().contains(RECORD2));
    assertFalse(clone.getAdditions().contains(RECORD1));
    assertTrue(clone.getAdditions().isEmpty());
    assertTrue(clone.getDeletions().contains(RECORD3));
  }
  // removeDeletion removes the given record from deletions.
  @Test
  public void testRemoveDeletion() {
    ChangeRequestInfo clone = CHANGE.toBuilder().removeDeletion(RECORD3).build();
    assertTrue(clone.getDeletions().isEmpty());
  }
  // A service-formatted RFC3339 start time must parse and re-serialize unchanged.
  @Test
  public void testDateParsing() {
    String startTime = "2016-01-26T18:33:43.512Z"; // obtained from service
    Change change = CHANGE.toPb().setStartTime(startTime);
    ChangeRequestInfo converted = ChangeRequest.fromPb(change);
    assertNotNull(converted.getStartTimeMillis());
    assertEquals(change, converted.toPb());
    assertEquals(change.getStartTime(), converted.toPb().getStartTime());
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package sk.arsi.netbeans.gradle.android.maven.dialog;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Image;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.concurrent.atomic.AtomicLong;
import javax.swing.Action;
import javax.swing.JButton;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.text.BadLocationException;
import javax.swing.text.Document;
import javax.swing.tree.TreeSelectionModel;
import org.netbeans.api.annotations.common.StaticResource;
import org.netbeans.modules.maven.indexer.api.NBVersionInfo;
import org.openide.DialogDescriptor;
import org.openide.NotificationLineSupport;
import org.openide.explorer.ExplorerManager;
import org.openide.explorer.view.BeanTreeView;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.FilterNode;
import org.openide.nodes.Node;
import org.openide.util.ImageUtilities;
import org.openide.util.Lookup;
import org.openide.util.RequestProcessor;
import org.openide.util.WeakListeners;
import org.openide.util.lookup.AbstractLookup;
import org.openide.util.lookup.InstanceContent;
import org.openide.util.lookup.Lookups;
import sk.arsi.netbeans.gradle.android.maven.MavenDependencyInfo;
import sk.arsi.netbeans.gradle.android.maven.MavenSearchProvider;
import sk.arsi.netbeans.gradle.android.maven.RepoSearchListener;
import sk.arsi.netbeans.gradle.android.maven.repository.Repository;
/**
* @author mkleint
* @author arsi NBANDROID MOD
*/
public class AddDependencyPanel extends javax.swing.JPanel {
private static final Object LOCK = new Object();
private static AbstractNode searchingNode;
private static AbstractNode tooGeneralNode;
private static AbstractNode noResultsNode;
private NotificationLineSupport nls;
private static final @StaticResource
String EMPTY_ICON = "sk/arsi/netbeans/gradle/android/maven/dialog/empty.png";
private static final @StaticResource
String WAIT_ICON = "sk/arsi/netbeans/gradle/android/maven/dialog/wait.gif";
private static final @StaticResource
String MAVEN_ICON = "sk/arsi/netbeans/gradle/android/maven/dialog/maven.png";
private static final @StaticResource
String JFROG_ICON = "sk/arsi/netbeans/gradle/android/maven/dialog/jfrog.png";
private static final @StaticResource
String GOOGLE_ICON = "sk/arsi/netbeans/gradle/android/maven/dialog/google.png";
private static final @StaticResource
String PACKAGE_ICON = "sk/arsi/netbeans/gradle/android/maven/dialog/package.png";
private static final RequestProcessor RPofQueryPanel = new RequestProcessor(AddDependencyPanel.QueryPanel.class.getName() + "-Search", 10);
private static final RequestProcessor UPDATE_PROCESSOR = new RequestProcessor(AddDependencyPanel.QueryPanel.class.getName() + "-Node-Update", 1);
private QueryPanel queryPanel;
private static final AtomicLong searchId = new AtomicLong(0);
private final List<Repository> repositories;
private final List<String> currentPackages;
private final List<MavenDependencyInfo> googleDependenciesInfos = new ArrayList<>();
private final List<MavenDependencyInfo> jcenterDependenciesInfos = new ArrayList<>();
private final List<MavenDependencyInfo> mavenDependenciesInfos = new ArrayList<>();
private final List<MavenDependencyInfo> installedDependenciesInfos = new ArrayList<>();
private boolean jcenterPartial = true;
private boolean googlePartial = true;
private boolean mavenPartial = true;
private String selected = null;
/**
* Creates new form AddDependenCy
*/
/**
 * Creates the interactive search variant of the dependency dialog.
 *
 * @param repositories    repositories to search in
 * @param currentPackages "group:artifact[:version]" lines already present in the project
 */
public AddDependencyPanel(List<Repository> repositories, List<String> currentPackages) {
    initComponents();
    this.repositories = repositories;
    this.currentPackages = currentPackages;
    for (String dependency : currentPackages) {
        final StringTokenizer tokenizer = new StringTokenizer(dependency, ":", false);
        if (tokenizer.countTokens() > 1) {
            final String group = tokenizer.nextToken();
            final String artifact = tokenizer.nextToken();
            // Register the installed dependency once per repository type.
            installedDependenciesInfos.add(new MavenDependencyInfo(MavenDependencyInfo.Type.MAVEN, group, artifact));
            installedDependenciesInfos.add(new MavenDependencyInfo(MavenDependencyInfo.Type.GOOGLE, group, artifact));
            installedDependenciesInfos.add(new MavenDependencyInfo(MavenDependencyInfo.Type.JCENTER, group, artifact));
        }
    }
    // Finish wiring the result tree and the delayed search trigger on the EDT.
    SwingUtilities.invokeLater(new Runnable() {
        @Override
        public void run() {
            queryPanel = new QueryPanel();
            resultsPanel.add(queryPanel, BorderLayout.CENTER);
            searchField.getDocument().addDocumentListener(
                    DelayedDocumentChangeListener.create(
                            searchField.getDocument(), queryPanel, 1500));
        }
    });
}
/**
 * Creates the pre-indexed variant of the dependency dialog: the search row is hidden
 * and results come from the supplied index instead of live repository queries.
 *
 * @param repositories    repositories (kept for parity with the search variant)
 * @param currentPackages "group:artifact[:version]" lines already present in the project
 * @param index           pre-built dependency list shown immediately
 */
public AddDependencyPanel(List<Repository> repositories, List<String> currentPackages, List<MavenDependencyInfo> index) {
    initComponents();
    this.repositories = repositories;
    this.currentPackages = currentPackages;
    for (String dependency : currentPackages) {
        final StringTokenizer tokenizer = new StringTokenizer(dependency, ":", false);
        if (tokenizer.countTokens() > 1) {
            final String group = tokenizer.nextToken();
            final String artifact = tokenizer.nextToken();
            // Register the installed dependency once per repository type.
            installedDependenciesInfos.add(new MavenDependencyInfo(MavenDependencyInfo.Type.MAVEN, group, artifact));
            installedDependenciesInfos.add(new MavenDependencyInfo(MavenDependencyInfo.Type.GOOGLE, group, artifact));
            installedDependenciesInfos.add(new MavenDependencyInfo(MavenDependencyInfo.Type.JCENTER, group, artifact));
        }
    }
    SwingUtilities.invokeLater(new Runnable() {
        @Override
        public void run() {
            queryPanel = new QueryPanel();
            resultsPanel.add(queryPanel, BorderLayout.CENTER);
            // Offline/index mode: hide the interactive search controls entirely.
            labelQ.setVisible(false);
            searchField.setVisible(false);
            labelHelp.setVisible(false);
            googleDependenciesInfos.addAll(index);
            jcenterPartial = false;
            googlePartial = false;
            mavenPartial = false;
            queryPanel.updateResults();
        }
    });
}
// Gradle dependency line chosen by the user, or null when nothing is selected.
public String getSelected() {
    return selected;
}
/**
 * Binds this panel to its dialog's notification line, creating one when the
 * descriptor does not have it yet. Must be called before the panel is shown
 * (see the assertion in {@code addNotify}).
 */
public void attachDialogDisplayer(DialogDescriptor dd) {
    final NotificationLineSupport existing = dd.getNotificationLineSupport();
    nls = (existing != null) ? existing : dd.createNotificationLineSupport();
}
// OK button, exposed so the dialog descriptor can use it as an option.
public JButton getOkButton() {
    return ok;
}
// Cancel button, exposed so the dialog descriptor can use it as an option.
public JButton getCancelButton() {
    return cancel;
}
/**
 * Reacts to a tree selection change: updates the group/artifact/version labels,
 * the selected Gradle line, and the OK button enablement.
 * A version node wins over an artifact node; an artifact node falls back to the
 * dynamic "+" version; no selection clears everything.
 */
private void changeSelection(Lookup lookup) {
    final MavenDependencyInfo artifactNode = lookup.lookup(MavenDependencyInfo.class);
    final MavenDependencyInfo.Version versionNode = lookup.lookup(MavenDependencyInfo.Version.class);
    final String group;
    final String artifact;
    final String version;
    if (versionNode != null) {
        group = versionNode.getGroupId();
        artifact = versionNode.getArtifactId();
        version = versionNode.getVersion();
        selected = versionNode.getGradleLine();
    } else if (artifactNode != null) {
        group = artifactNode.getGroupId();
        artifact = artifactNode.getArtifactId();
        version = "+";
        selected = artifactNode.getGradleLine() + ":+";
    } else {
        group = "...";
        artifact = "...";
        version = "...";
        selected = null;
    }
    labelGroup.setText(group);
    labelArtifact.setText(artifact);
    labelVersion.setText(version);
    ok.setEnabled(selected != null);
}
// Fails fast (when JVM assertions are enabled) if attachDialogDisplayer was not
// called before the panel is added to a container.
@Override
public void addNotify() {
    super.addNotify();
    assert nls != null : " The notificationLineSupport was not attached to the panel."; //NOI18N
}
/**
 * This method is called from within the constructor to initialize the form.
 * WARNING: Do NOT modify this code. The content of this method is always
 * regenerated by the Form Editor.
 * <p>
 * Builds the group/artifact/version summary labels, the search row, the results
 * area and the OK/Cancel buttons inside a single tab, wired with GroupLayout.
 */
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
    jLabel1 = new javax.swing.JLabel();
    jLabel2 = new javax.swing.JLabel();
    jLabel3 = new javax.swing.JLabel();
    labelGroup = new javax.swing.JLabel();
    labelArtifact = new javax.swing.JLabel();
    labelVersion = new javax.swing.JLabel();
    jTabbedPane1 = new javax.swing.JTabbedPane();
    jPanel1 = new javax.swing.JPanel();
    labelQ = new javax.swing.JLabel();
    searchField = new javax.swing.JTextField();
    labelHelp = new javax.swing.JLabel();
    resultsLabel = new javax.swing.JLabel();
    resultsPanel = new javax.swing.JPanel();
    ok = new javax.swing.JButton();
    cancel = new javax.swing.JButton();
    org.openide.awt.Mnemonics.setLocalizedText(jLabel1, org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.jLabel1.text")); // NOI18N
    org.openide.awt.Mnemonics.setLocalizedText(jLabel2, org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.jLabel2.text")); // NOI18N
    org.openide.awt.Mnemonics.setLocalizedText(jLabel3, org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.jLabel3.text")); // NOI18N
    labelGroup.setFont(new java.awt.Font("Arial", 1, 14)); // NOI18N
    org.openide.awt.Mnemonics.setLocalizedText(labelGroup, org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.labelGroup.text")); // NOI18N
    labelArtifact.setFont(new java.awt.Font("Arial", 1, 14)); // NOI18N
    org.openide.awt.Mnemonics.setLocalizedText(labelArtifact, org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.labelArtifact.text")); // NOI18N
    labelVersion.setFont(new java.awt.Font("Arial", 1, 14)); // NOI18N
    org.openide.awt.Mnemonics.setLocalizedText(labelVersion, org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.labelVersion.text")); // NOI18N
    org.openide.awt.Mnemonics.setLocalizedText(labelQ, org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.labelQ.text")); // NOI18N
    searchField.setText(org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.searchField.text")); // NOI18N
    labelHelp.setForeground(new java.awt.Color(153, 153, 153));
    org.openide.awt.Mnemonics.setLocalizedText(labelHelp, org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.labelHelp.text")); // NOI18N
    org.openide.awt.Mnemonics.setLocalizedText(resultsLabel, org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.resultsLabel.text")); // NOI18N
    resultsPanel.setLayout(new java.awt.BorderLayout());
    org.openide.awt.Mnemonics.setLocalizedText(ok, org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.ok.text")); // NOI18N
    ok.setEnabled(false);
    org.openide.awt.Mnemonics.setLocalizedText(cancel, org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.cancel.text")); // NOI18N
    javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
    jPanel1.setLayout(jPanel1Layout);
    jPanel1Layout.setHorizontalGroup(
        jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel1Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addComponent(resultsPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addGroup(jPanel1Layout.createSequentialGroup()
                    .addComponent(labelQ)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addGroup(jPanel1Layout.createSequentialGroup()
                            .addComponent(labelHelp)
                            .addGap(0, 477, Short.MAX_VALUE))
                        .addComponent(searchField)))
                .addGroup(jPanel1Layout.createSequentialGroup()
                    .addComponent(resultsLabel)
                    .addGap(0, 0, Short.MAX_VALUE))
                .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup()
                    .addGap(0, 0, Short.MAX_VALUE)
                    .addComponent(ok)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(cancel)))
            .addContainerGap())
    );
    jPanel1Layout.setVerticalGroup(
        jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel1Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(labelQ)
                .addComponent(searchField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addComponent(labelHelp)
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
            .addComponent(resultsLabel)
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addComponent(resultsPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(ok)
                .addComponent(cancel))
            .addContainerGap())
    );
    jTabbedPane1.addTab(org.openide.util.NbBundle.getMessage(AddDependencyPanel.class, "AddDependencyPanel.jPanel1.TabConstraints.tabTitle"), jPanel1); // NOI18N
    javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
    this.setLayout(layout);
    layout.setHorizontalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addComponent(jLabel1)
                .addComponent(jLabel2)
                .addComponent(jLabel3))
            .addGap(18, 18, 18)
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addComponent(labelVersion)
                .addComponent(labelArtifact)
                .addComponent(labelGroup))
            .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        .addComponent(jTabbedPane1)
    );
    layout.setVerticalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel1)
                .addComponent(labelGroup))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel2)
                .addComponent(labelArtifact))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel3)
                .addComponent(labelVersion))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
            .addComponent(jTabbedPane1))
    );
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
// Swing components owned by this form, generated by the NetBeans GUI builder.
// Change them through the form designer only; hand edits inside this fold are discarded.
private javax.swing.JButton cancel;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JPanel jPanel1;
private javax.swing.JTabbedPane jTabbedPane1;
private javax.swing.JLabel labelArtifact;
private javax.swing.JLabel labelGroup;
private javax.swing.JLabel labelHelp;
private javax.swing.JLabel labelQ;
private javax.swing.JLabel labelVersion;
private javax.swing.JButton ok;
private javax.swing.JLabel resultsLabel;
private javax.swing.JPanel resultsPanel;
private javax.swing.JTextField searchField;
// End of variables declaration//GEN-END:variables
/**
 * The search tab: listens to edits in the search field, fans a repository
 * query out to every registered {@link MavenSearchProvider}, merges the
 * per-repository results and renders them in a {@link BeanTreeView}.
 */
private class QueryPanel extends JPanel implements ExplorerManager.Provider,
        Comparator<MavenDependencyInfo>, PropertyChangeListener, ChangeListener, RepoSearchListener {

    private final BeanTreeView btv;
    private final ExplorerManager manager;
    private final ResultsRootNode resultsRootNode;
    // lastQueryText is guarded by LOCK; curTypedText mirrors the field content.
    // NOTE(review): inProgressText is never read or written inside this class --
    // verify whether the enclosing panel uses it before removing.
    private String inProgressText, lastQueryText, curTypedText;
    // Original foreground colour of the search field, restored on each edit.
    private final Color defSearchC;

    private QueryPanel() {
        btv = new BeanTreeView();
        btv.setRootVisible(false);
        btv.setDefaultActionAllowed(true);
        btv.setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
        manager = new ExplorerManager();
        setLayout(new BorderLayout());
        add(btv, BorderLayout.CENTER);
        defSearchC = AddDependencyPanel.this.searchField.getForeground();
        manager.addPropertyChangeListener(this);
        AddDependencyPanel.this.resultsLabel.setLabelFor(btv);
        btv.getAccessibleContext().setAccessibleDescription(AddDependencyPanel.this.resultsLabel.getAccessibleContext().getAccessibleDescription());
        resultsRootNode = new ResultsRootNode();
        manager.setRootContext(resultsRootNode);
    }

    /**
     * Delayed change of query text: re-reads the document, resets the field
     * colour and triggers a search for any non-empty text.
     */
    @Override
    public void stateChanged(ChangeEvent e) {
        Document doc = (Document) e.getSource();
        try {
            curTypedText = doc.getText(0, doc.getLength()).trim();
        } catch (BadLocationException ex) {
            // should never happen, nothing we can do probably
            return;
        }
        AddDependencyPanel.this.searchField.setForeground(defSearchC);
        if (curTypedText.length() > 0) {
            find(curTypedText);
        }
    }

    /**
     * Starts a search for {@code queryText}; a repeat of the previous query is
     * ignored. Shows the "Searching" placeholder on the EDT, then dispatches
     * the query to every registered provider.
     */
    void find(String queryText) {
        synchronized (LOCK) {
            changeSelection(Lookup.EMPTY);
            if (queryText.equals(lastQueryText)) {
                return;
            }
            lastQueryText = queryText;
        }
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                resultsRootNode.setOneChild(getSearchingNode());
                AddDependencyPanel.this.searchField.setForeground(defSearchC);
                AddDependencyPanel.this.nls.clearMessages();
            }
        });
        // All three repositories are considered in-flight until each reports back.
        jcenterPartial = true;
        mavenPartial = true;
        googlePartial = true;
        // A new search id invalidates callbacks from any older, still-running search.
        long currentSearchId = searchId.incrementAndGet();
        Collection<? extends MavenSearchProvider> providers = Lookup.getDefault().lookupAll(MavenSearchProvider.class);
        for (MavenSearchProvider next : providers) {
            // NOTE(review): the literal "jcenter" is passed as the repo name to every
            // provider -- confirm whether providers ignore it or it should vary per provider.
            next.searchPackageName(queryText, "jcenter", WeakListeners.create(RepoSearchListener.class, (RepoSearchListener) this, null), currentSearchId, repositories);
        }
    }

    @Override
    public ExplorerManager getExplorerManager() {
        return manager;
    }

    /**
     * Merges the per-repository result lists, drops already-installed
     * dependencies, sorts the remainder and schedules the node update on the EDT.
     */
    void updateResults() {
        final List<MavenDependencyInfo> dependencyInfos = new ArrayList<>();
        dependencyInfos.addAll(googleDependenciesInfos);
        dependencyInfos.addAll(jcenterDependenciesInfos);
        dependencyInfos.addAll(mavenDependenciesInfos);
        dependencyInfos.removeAll(installedDependenciesInfos);
        Collections.sort(dependencyInfos, QueryPanel.this);
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                updateResultNodes(dependencyInfos);
            }
        });
    }

    // Rebuilds the result tree, reusing an existing node when the artifact is
    // already shown (only its version children are refreshed) so tree state
    // survives incremental updates. Must run on the EDT.
    private void updateResultNodes(List<MavenDependencyInfo> dependencyInfos) {
        if (dependencyInfos.size() > 0) { // some results available
            Map<MavenDependencyInfo, Node> currentNodes = new HashMap<>();
            for (Node nd : resultsRootNode.getChildren().getNodes()) {
                currentNodes.put(nd.getLookup().lookup(MavenDependencyInfo.class), nd);
            }
            List<Node> newNodes = new ArrayList<Node>(dependencyInfos.size());
            // still searching?
            if (mavenPartial || jcenterPartial || googlePartial) {
                newNodes.add(getSearchingNode());
            }
            for (MavenDependencyInfo key : dependencyInfos) {
                Node nd;
                nd = currentNodes.get(key);
                if (null != nd) {
                    // Existing node: just refresh its version children in place.
                    ((ArtifactNode) ((AddDependencyPanel.FilterNodeWithDefAction) nd).getOriginal()).setVersionInfos(key.getVersions());
                } else {
                    nd = createFilterWithDefaultAction(new ArtifactNode(key), false);
                }
                newNodes.add(nd);
            }
            resultsRootNode.setNewChildren(newNodes);
        } else if (googlePartial || jcenterPartial || mavenPartial) { // still searching, no results yet
            resultsRootNode.setOneChild(getSearchingNode());
        } else { // finished searching with no results
            resultsRootNode.setOneChild(getNoResultsNode());
        }
    }

    /**
     * Impl of comparator, sorts artifacts alphabetically by their Gradle line.
     * NOTE(review): the previous comment promised that items containing the
     * current query string take precedence, but the implementation is a plain
     * lexicographic compare -- confirm which behavior is intended.
     */
    @Override
    public int compare(MavenDependencyInfo s1, MavenDependencyInfo s2) {
        return s1.getGradleLine().compareTo(s2.getGradleLine());
    }

    /**
     * PropertyChangeListener impl, stores maven coordinates of selected
     * artifact
     */
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        if (ExplorerManager.PROP_SELECTED_NODES.equals(evt.getPropertyName())) {
            Node[] selNodes = manager.getSelectedNodes();
            changeSelection(selNodes.length == 1 ? selNodes[0].getLookup() : Lookup.EMPTY);
        }
    }

    // Canonical group:artifact:version key for a version info.
    // NOTE(review): not referenced inside QueryPanel; the enclosing class can
    // legally call private members of this inner class, so verify callers
    // before removing.
    private String key(NBVersionInfo nbvi) {
        return nbvi.getGroupId() + ':' + nbvi.getArtifactId() + ':' + nbvi.getVersion();
    }

    /**
     * Callback from a search provider. Results belonging to a superseded
     * search id are dropped; otherwise the repository's result list and its
     * "partial" flag are replaced and a UI refresh is scheduled.
     */
    @Override
    public void searchDone(Type type, long searchId, boolean isPartial, List<MavenDependencyInfo> dependencyInfos) {
        //handle only last search
        if (searchId == AddDependencyPanel.searchId.get()) {
            switch (type) {
                case GOOGLE:
                    googleDependenciesInfos.clear();
                    googleDependenciesInfos.addAll(dependencyInfos);
                    googlePartial = isPartial;
                    break;
                case JCENTER:
                    jcenterDependenciesInfos.clear();
                    jcenterDependenciesInfos.addAll(dependencyInfos);
                    jcenterPartial = isPartial;
                    break;
                case MAVEN:
                    mavenDependenciesInfos.clear();
                    mavenDependenciesInfos.addAll(dependencyInfos);
                    mavenPartial = isPartial;
                    break;
                default:
                    throw new AssertionError(type.name());
            }
            Runnable runnable = new Runnable() {
                @Override
                public void run() {
                    updateResults();
                }
            };
            UPDATE_PROCESSOR.execute(runnable);
        }
    }
} // QueryPanel
/**
 * Node shown in the results tree for one artifact; its children are the
 * artifact's known versions.
 */
public static class ArtifactNode extends AbstractNode {

    private final MavenDependencyInfo info;
    private final ArtifactNodeChildren versionChildren;
    private List<MavenDependencyInfo.Version> versions;

    public ArtifactNode(MavenDependencyInfo dependencyInfo) {
        super(new ArtifactNodeChildren(dependencyInfo.getVersions()), Lookups.fixed(dependencyInfo));
        this.info = dependencyInfo;
        this.versions = dependencyInfo.getVersions();
        this.versionChildren = (ArtifactNodeChildren) getChildren();
        String gradleLine = dependencyInfo.getGradleLine();
        setName(gradleLine);
        setDisplayName(gradleLine);
    }

    @Override
    public Image getIcon(int type) {
        // Badge reflecting the repository the artifact was found in.
        switch (info.getType()) {
            case GOOGLE:
                return ImageUtilities.loadImage(GOOGLE_ICON); //NOI18N
            case JCENTER:
                return ImageUtilities.loadImage(JFROG_ICON); //NOI18N
            case MAVEN:
                return ImageUtilities.loadImage(MAVEN_ICON); //NOI18N
            default:
                return super.getIcon(type);
        }
    }

    @Override
    public Image getOpenedIcon(int type) {
        return getIcon(type);
    }

    /** @return a defensive copy of the currently known versions. */
    public List<MavenDependencyInfo.Version> getVersionInfos() {
        return new ArrayList<>(versions);
    }

    /** Replaces the version list and refreshes the child nodes accordingly. */
    public void setVersionInfos(List<MavenDependencyInfo.Version> versions) {
        this.versions = versions;
        versionChildren.setNewKeys(versions);
    }

    /** Children keyed by artifact version; one {@link VersionNode} per version. */
    static class ArtifactNodeChildren extends Children.Keys<MavenDependencyInfo.Version> {

        private List<MavenDependencyInfo.Version> keys;

        public ArtifactNodeChildren(List<MavenDependencyInfo.Version> keys) {
            this.keys = keys;
        }

        @Override
        protected Node[] createNodes(MavenDependencyInfo.Version version) {
            return new Node[]{new VersionNode(version)};
        }

        @Override
        protected void addNotify() {
            setKeys(keys);
        }

        protected void setNewKeys(List<MavenDependencyInfo.Version> keys) {
            this.keys = keys;
            setKeys(keys);
        }
    }
}
/**
 * Leaf node representing one concrete version of an artifact.
 */
public static class VersionNode extends AbstractNode {

    private final MavenDependencyInfo.Version versionInfo;

    /**
     * Creates a new instance of VersionNode
     */
    public VersionNode(MavenDependencyInfo.Version version) {
        super(Children.LEAF, Lookups.fixed(version));
        this.versionInfo = version;
        String label = version.getVersion();
        setName(label);
        setDisplayName(label);
    }

    @Override
    public Image getIcon(int type) {
        return ImageUtilities.loadImage(PACKAGE_ICON); //NOI18N
    }

    @Override
    public Image getOpenedIcon(int type) {
        return getIcon(type);
    }

    /** @return the version this node stands for. */
    public MavenDependencyInfo.Version getVersionInfo() {
        return versionInfo;
    }

    @Override
    public String getShortDescription() {
        return versionInfo.toString();
    }
}
/**
 * Invisible root of the results tree; exposes itself through its lookup and
 * delegates all child management to {@link ResultsRootChildren}.
 */
private class ResultsRootNode extends AbstractNode {

    private ResultsRootChildren rootChildren;

    public ResultsRootNode() {
        this(new InstanceContent());
    }

    private ResultsRootNode(InstanceContent content) {
        super(new ResultsRootChildren(), new AbstractLookup(content));
        content.add(this);
        this.rootChildren = (ResultsRootChildren) getChildren();
    }

    /** Shows exactly one child, e.g. the "Searching" or "Empty" placeholder. */
    public void setOneChild(Node n) {
        setNewChildren(Collections.singletonList(n));
    }

    public void setNewChildren(List<Node> ch) {
        rootChildren.setNewChildren(ch);
    }
}
/**
 * Children of {@link ResultsRootNode}: holds a list of pre-built nodes that is
 * swapped wholesale whenever new search results arrive.
 *
 * <p>Fixes: raw {@code new ArrayList()} and raw {@code Collections.EMPTY_LIST}
 * replaced with their typed equivalents; the manual element-copy loop replaced
 * by the copy constructor.</p>
 */
private class ResultsRootChildren extends Children.Keys<Node> {

    List<Node> myNodes;

    public ResultsRootChildren() {
        // Typed empty list instead of the raw Collections.EMPTY_LIST constant.
        myNodes = Collections.emptyList();
    }

    private void setNewChildren(List<Node> ch) {
        myNodes = ch;
        refreshList();
    }

    @Override
    protected void addNotify() {
        refreshList();
    }

    private void refreshList() {
        // Hand the Children.Keys machinery its own snapshot so later changes to
        // myNodes cannot affect the installed key set.
        setKeys(new ArrayList<>(myNodes));
    }

    @Override
    protected Node[] createNodes(Node key) {
        // Keys are already fully-built nodes; expose each one directly.
        return new Node[]{key};
    }
}
/**
 * Lazily builds the shared "Searching.." placeholder and returns a fresh
 * child-less FilterNode wrapper around it (nodes cannot appear in two trees).
 */
private static Node getSearchingNode() {
    if (searchingNode == null) {
        AbstractNode placeholder = new AbstractNode(Children.LEAF) {
            @Override
            public Image getIcon(int type) {
                return ImageUtilities.loadImage(WAIT_ICON); //NOI18N
            }

            @Override
            public Image getOpenedIcon(int type) {
                return getIcon(type);
            }
        };
        placeholder.setName("Searching"); //NOI18N
        placeholder.setDisplayName("Searching.."); //NOI18N
        searchingNode = placeholder;
    }
    return new FilterNode(searchingNode, Children.LEAF);
}
/**
 * Lazily builds the shared "Too General" placeholder (shown when the query is
 * too broad) and returns a fresh child-less FilterNode wrapper around it.
 */
private static Node getTooGeneralNode() {
    if (tooGeneralNode == null) {
        AbstractNode placeholder = new AbstractNode(Children.LEAF) {
            @Override
            public Image getIcon(int type) {
                return ImageUtilities.loadImage(EMPTY_ICON); //NOI18N
            }

            @Override
            public Image getOpenedIcon(int type) {
                return getIcon(type);
            }
        };
        placeholder.setName("Too General"); //NOI18N
        placeholder.setDisplayName("Too General"); //NOI18N
        tooGeneralNode = placeholder;
    }
    return new FilterNode(tooGeneralNode, Children.LEAF);
}
/**
 * Lazily builds the shared "Empty" placeholder (shown when the search finished
 * without results) and returns a fresh child-less FilterNode wrapper around it.
 */
private static Node getNoResultsNode() {
    if (noResultsNode == null) {
        AbstractNode placeholder = new AbstractNode(Children.LEAF) {
            @Override
            public Image getIcon(int type) {
                return ImageUtilities.loadImage(EMPTY_ICON); //NOI18N
            }

            @Override
            public Image getOpenedIcon(int type) {
                return getIcon(type);
            }
        };
        placeholder.setName("Empty"); //NOI18N
        placeholder.setDisplayName("Empty"); //NOI18N
        noResultsNode = placeholder;
    }
    return new FilterNode(noResultsNode, Children.LEAF);
}
/**
 * Wraps {@code nd} in a {@link FilterNodeWithDefAction}; {@code leaf} controls
 * whether the wrapper hides the node's children.
 */
private Node createFilterWithDefaultAction(final Node nd, boolean leaf) {
    return new FilterNodeWithDefAction(nd, leaf);
}
/**
 * FilterNode used for search results: suppresses all popup actions and
 * recursively wraps children so the whole subtree behaves uniformly.
 */
class FilterNodeWithDefAction extends FilterNode {

    public FilterNodeWithDefAction(Node nd, boolean leaf) {
        // Leaf wrappers hide children entirely; otherwise each child is wrapped
        // again, this time as a leaf (one level of versions below an artifact).
        super(nd, leaf ? Children.LEAF : new FilterNode.Children(nd) {
            @Override
            protected Node[] createNodes(Node key) {
                return new Node[]{createFilterWithDefaultAction(key, true)};
            }
        });
    }

    @Override
    public Action getPreferredAction() {
        // NOTE(review): delegates unchanged to FilterNode -- the override adds
        // nothing and appears to exist only as an explicit extension hook.
        return super.getPreferredAction();
    }

    @Override
    public Action[] getActions(boolean context) {
        // No context-menu actions on search results.
        return new Action[0];
    }

    @Override
    public Node getOriginal() {
        // Deliberately widens FilterNode.getOriginal() from protected to public
        // so callers (see QueryPanel.updateResultNodes) can reach the wrapped
        // ArtifactNode. Do not remove as "redundant".
        return super.getOriginal();
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions;
import java.nio.ByteOrder;
import org.apache.spark.unsafe.Platform;
import org.apache.spark.unsafe.types.UTF8String;
// scalastyle:off
/**
* xxHash64. A high quality and fast 64 bit hash code by Yann Colet and Mathias Westerdahl. The
* class below is modelled like its Murmur3_x86_32 cousin.
* <p/>
* This was largely based on the following (original) C and Java implementations:
* https://github.com/Cyan4973/xxHash/blob/master/xxhash.c
* https://github.com/OpenHFT/Zero-Allocation-Hashing/blob/master/src/main/java/net/openhft/hashing/XxHash_r39.java
* https://github.com/airlift/slice/blob/master/src/main/java/io/airlift/slice/XxHash64.java
*/
// scalastyle:on
public final class XXH64 {

  // Output must match the little-endian reference implementation, so every
  // multi-byte read is byte-swapped on big-endian hosts before mixing.
  private static final boolean isBigEndian = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN);

  // The five 64-bit prime multipliers from the xxHash64 specification.
  private static final long PRIME64_1 = 0x9E3779B185EBCA87L;
  private static final long PRIME64_2 = 0xC2B2AE3D27D4EB4FL;
  private static final long PRIME64_3 = 0x165667B19E3779F9L;
  private static final long PRIME64_4 = 0x85EBCA77C2B2AE63L;
  private static final long PRIME64_5 = 0x27D4EB2F165667C5L;

  // Seed used by the instance methods; the static overloads take one explicitly.
  private final long seed;

  public XXH64(long seed) {
    super();
    this.seed = seed;
  }

  @Override
  public String toString() {
    return "xxHash64(seed=" + seed + ")";
  }

  /** Hashes one 4-byte int with the instance seed. */
  public long hashInt(int input) {
    return hashInt(input, seed);
  }

  /** Hashes a 4-byte int as a single 32-bit tail lane, then avalanches. */
  public static long hashInt(int input, long seed) {
    long hash = seed + PRIME64_5 + 4L; // 4L == input length in bytes
    hash ^= (input & 0xFFFFFFFFL) * PRIME64_1; // zero-extend before multiplying
    hash = Long.rotateLeft(hash, 23) * PRIME64_2 + PRIME64_3;
    return fmix(hash);
  }

  /** Hashes one 8-byte long with the instance seed. */
  public long hashLong(long input) {
    return hashLong(input, seed);
  }

  /** Hashes an 8-byte long as a single word lane, then avalanches. */
  public static long hashLong(long input, long seed) {
    long hash = seed + PRIME64_5 + 8L; // 8L == input length in bytes
    hash ^= Long.rotateLeft(input * PRIME64_2, 31) * PRIME64_1;
    hash = Long.rotateLeft(hash, 27) * PRIME64_1 + PRIME64_4;
    return fmix(hash);
  }

  public long hashUnsafeWords(Object base, long offset, int length) {
    return hashUnsafeWords(base, offset, length, seed);
  }

  /**
   * Hashes a word-aligned region; {@code length} must be a multiple of 8, so
   * no tail handling is required after the word loop.
   */
  public static long hashUnsafeWords(Object base, long offset, int length, long seed) {
    assert (length % 8 == 0) : "lengthInBytes must be a multiple of 8 (word-aligned)";
    long hash = hashBytesByWords(base, offset, length, seed);
    return fmix(hash);
  }

  public long hashUnsafeBytes(Object base, long offset, int length) {
    return hashUnsafeBytes(base, offset, length, seed);
  }

  /**
   * Hashes an arbitrary-length region: bulk 8-byte words first (via
   * {@link #hashBytesByWords}), then one optional 4-byte lane, then the
   * remaining 0-3 single bytes, then the final avalanche.
   */
  public static long hashUnsafeBytes(Object base, long offset, int length, long seed) {
    assert (length >= 0) : "lengthInBytes cannot be negative";
    long hash = hashBytesByWords(base, offset, length, seed);
    long end = offset + length;
    offset += length & -8; // skip the bytes already consumed by the word loop
    if (offset + 4L <= end) {
      int k1 = Platform.getInt(base, offset);
      if (isBigEndian) {
        k1 = Integer.reverseBytes(k1);
      }
      hash ^= (k1 & 0xFFFFFFFFL) * PRIME64_1;
      hash = Long.rotateLeft(hash, 23) * PRIME64_2 + PRIME64_3;
      offset += 4L;
    }
    while (offset < end) {
      hash ^= (Platform.getByte(base, offset) & 0xFFL) * PRIME64_5;
      hash = Long.rotateLeft(hash, 11) * PRIME64_1;
      offset++;
    }
    return fmix(hash);
  }

  /** Hashes the raw UTF-8 bytes backing {@code str}. */
  public static long hashUTF8String(UTF8String str, long seed) {
    return hashUnsafeBytes(str.getBaseObject(), str.getBaseOffset(), str.numBytes(), seed);
  }

  // Final avalanche: xor-shift/multiply rounds that diffuse every input bit
  // across the whole 64-bit result.
  private static long fmix(long hash) {
    hash ^= hash >>> 33;
    hash *= PRIME64_2;
    hash ^= hash >>> 29;
    hash *= PRIME64_3;
    hash ^= hash >>> 32;
    return hash;
  }

  // Core loop over whole 8-byte words; tail bytes (length % 8) are handled by
  // the callers. No avalanche here -- callers apply fmix themselves.
  private static long hashBytesByWords(Object base, long offset, int length, long seed) {
    long end = offset + length;
    long hash;
    if (length >= 32) {
      // >= 32 bytes: run four independent accumulators over 32-byte stripes,
      // then merge them, exactly as the xxHash64 reference does.
      long limit = end - 32;
      long v1 = seed + PRIME64_1 + PRIME64_2;
      long v2 = seed + PRIME64_2;
      long v3 = seed;
      long v4 = seed - PRIME64_1;
      do {
        long k1 = Platform.getLong(base, offset);
        long k2 = Platform.getLong(base, offset + 8);
        long k3 = Platform.getLong(base, offset + 16);
        long k4 = Platform.getLong(base, offset + 24);
        if (isBigEndian) {
          k1 = Long.reverseBytes(k1);
          k2 = Long.reverseBytes(k2);
          k3 = Long.reverseBytes(k3);
          k4 = Long.reverseBytes(k4);
        }
        v1 = Long.rotateLeft(v1 + (k1 * PRIME64_2), 31) * PRIME64_1;
        v2 = Long.rotateLeft(v2 + (k2 * PRIME64_2), 31) * PRIME64_1;
        v3 = Long.rotateLeft(v3 + (k3 * PRIME64_2), 31) * PRIME64_1;
        v4 = Long.rotateLeft(v4 + (k4 * PRIME64_2), 31) * PRIME64_1;
        offset += 32L;
      } while (offset <= limit);
      hash = Long.rotateLeft(v1, 1)
        + Long.rotateLeft(v2, 7)
        + Long.rotateLeft(v3, 12)
        + Long.rotateLeft(v4, 18);
      // Fold each accumulator into the merged hash (mergeRound in the reference).
      v1 *= PRIME64_2;
      v1 = Long.rotateLeft(v1, 31);
      v1 *= PRIME64_1;
      hash ^= v1;
      hash = hash * PRIME64_1 + PRIME64_4;
      v2 *= PRIME64_2;
      v2 = Long.rotateLeft(v2, 31);
      v2 *= PRIME64_1;
      hash ^= v2;
      hash = hash * PRIME64_1 + PRIME64_4;
      v3 *= PRIME64_2;
      v3 = Long.rotateLeft(v3, 31);
      v3 *= PRIME64_1;
      hash ^= v3;
      hash = hash * PRIME64_1 + PRIME64_4;
      v4 *= PRIME64_2;
      v4 = Long.rotateLeft(v4, 31);
      v4 *= PRIME64_1;
      hash ^= v4;
      hash = hash * PRIME64_1 + PRIME64_4;
    } else {
      // Short input: no stripe phase, start directly from the seeded base.
      hash = seed + PRIME64_5;
    }
    hash += length;
    // Remaining whole 8-byte words after the 32-byte stripes.
    long limit = end - 8;
    while (offset <= limit) {
      long k1 = Platform.getLong(base, offset);
      if (isBigEndian) {
        k1 = Long.reverseBytes(k1);
      }
      hash ^= Long.rotateLeft(k1 * PRIME64_2, 31) * PRIME64_1;
      hash = Long.rotateLeft(hash, 27) * PRIME64_1 + PRIME64_4;
      offset += 8L;
    }
    return hash;
  }
}
| |
/*
Copyright (c) 2015 by ScaleOut Software, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.scaleoutsoftware.soss.hserver.hadoop;
import com.scaleoutsoftware.soss.hserver.InvocationParameters;
import com.scaleoutsoftware.soss.hserver.MapOutputAccumulator;
import com.scaleoutsoftware.soss.hserver.interop.KeyValueProducer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.apache.hadoop.mapreduce.task.ReduceContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.util.ResourceCalculatorProcessTree;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.URI;
import java.util.HashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeoutException;
public class HadoopVersionSpecificCode_HDP2_1_YARN extends HadoopVersionSpecificCode {
/**
 * @return identifier of the Hadoop distribution/flavor this version-specific
 *         implementation targets (Hortonworks HDP 2.1 on YARN).
 */
@Override
public String getHadoopLibraryString() {
    return "hdp2.1-yarn";
}
/**
 * Builds a task attempt id (always attempt number 0) for the given job.
 *
 * @param jobID           owning job
 * @param isMapper        true for a map task, false for a reduce task
 * @param hadoopPartition task/partition index within the job
 */
@Override
public TaskAttemptID createTaskAttemptId(JobID jobID, boolean isMapper, int hadoopPartition) {
    return new TaskAttemptID(new TaskID(jobID, isMapper, hadoopPartition), 0);
}
/** Wraps the configuration and attempt id in a new-API (mapreduce) task attempt context. */
@Override
public TaskAttemptContext createTaskAttemptContext(Configuration configuration, TaskAttemptID id) {
    return new TaskAttemptContextImpl(configuration, id);
}
/** Creates an old-API (mapred) job context for the given job. */
@Override
public org.apache.hadoop.mapred.JobContext createJobContext(JobConf configuration, JobID jobID) {
    // NOTE(review): a previous comment here claimed distributed-cache
    // initialization, but this method only constructs the context object.
    return new org.apache.hadoop.mapred.JobContextImpl(configuration, jobID);
}
/**
 * Creates a Reducer.Context whose input stream comes from {@code transport}
 * and whose combiner output is routed to {@code consumer};
 * see {@link WrappingReducer.WrappingContext} for the delegation details.
 */
@Override
public <KEYIN, VALUEIN, KEYOUT, VALUEOUT> Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context getReducerContext(Configuration configuration, TaskAttemptID id, OutputCommitter outputCommitter, RecordWriter<KEYOUT, VALUEOUT> output, KeyValueProducer<KEYIN, Iterable<VALUEIN>> transport, MapOutputAccumulator<KEYOUT, VALUEOUT> consumer) throws IOException, InterruptedException {
    return (new WrappingReducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>()).getReducerContext(configuration, id, outputCommitter, output, transport, consumer);
}
/**
 * Reducer subclass that exists only to materialize a Reducer.Context whose
 * key/value stream is fed by an hServer {@link KeyValueProducer} and whose
 * combiner output is redirected to a {@link MapOutputAccumulator}. Every other
 * context query is delegated to a wrapped {@link ReduceContextImpl}.
 */
static class WrappingReducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends org.apache.hadoop.mapreduce.Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> {

    public WrappingReducer() {
    }

    // Factory for the wrapping context; an instance method because Context is
    // a (non-static) inner class of Reducer.
    @SuppressWarnings("unchecked")
    public org.apache.hadoop.mapreduce.Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT>.Context getReducerContext(Configuration configuration,
            TaskAttemptID id,
            OutputCommitter outputCommitter,
            RecordWriter<KEYOUT, VALUEOUT> output,
            KeyValueProducer<KEYIN, Iterable<VALUEIN>> transport,
            MapOutputAccumulator<KEYOUT, VALUEOUT> consumer
    ) throws IOException, InterruptedException {
        return new WrappingContext(configuration, id, outputCommitter, output, transport, consumer);
    }

    /** Context facade: input from transport, combiner output to accumulator, everything else delegated. */
    public class WrappingContext extends org.apache.hadoop.mapreduce.Reducer.Context {

        // Delegate used for everything not overridden with transport/accumulator logic.
        ReduceContextImpl impl;
        // Supplies keys and grouped values; may be null (then nextKey() returns false).
        KeyValueProducer<KEYIN, Iterable<VALUEIN>> transport;
        // Sink for combine results; may be null (then write() goes to the delegate).
        MapOutputAccumulator<KEYOUT, VALUEOUT> mapOutputAccumulatorCallback;

        @SuppressWarnings("unchecked")
        WrappingContext(Configuration configuration,
                TaskAttemptID id,
                OutputCommitter outputCommitter,
                RecordWriter<KEYOUT, VALUEOUT> output,
                KeyValueProducer<KEYIN, Iterable<VALUEIN>> transport,
                MapOutputAccumulator<KEYOUT, VALUEOUT> consumer
        ) throws IOException, InterruptedException {
            //Override the actual key and value class with Writables, to ensure that constructor
            //will not throw exception if SerializationFactory does not support that class.
            //Any actual serialization/deserialization is performed by DataTransport, so this
            //factory is never used.
            super();
            impl = new ReduceContextImpl(configuration, id, new DummyRawIterator()
                , null, null, output, outputCommitter, new TaskAttemptContextImpl.DummyReporter(), null, Writable.class, Writable.class);
            this.transport = transport;
            this.mapOutputAccumulatorCallback = consumer;
        }

        /** Advances the transport to the next key group; false when no transport is attached. */
        @Override
        public boolean nextKey() throws IOException, InterruptedException {
            try {
                if (transport != null) {
                    return transport.readNext();
                } else {
                    return false;
                }
            } catch (TimeoutException e) {
                // Surface transport timeouts as IOException, which Hadoop callers expect.
                throw new IOException("Read operation timed out.", e);
            }
        }

        @Override
        public boolean nextKeyValue() throws IOException, InterruptedException {
            return impl.nextKeyValue();
        }

        /** Current key comes from the transport, not from the delegate. */
        @Override
        public KEYIN getCurrentKey() {
            return transport != null ? transport.getKey() : null;
        }

        @Override
        public Object getCurrentValue() {
            return impl.getCurrentValue();
        }

        /** Combine results go to the accumulator callback; otherwise fall through to the delegate writer. */
        @Override
        public void write(Object key, Object value) throws IOException, InterruptedException {
            if (mapOutputAccumulatorCallback != null) {
                mapOutputAccumulatorCallback.saveCombineResult((KEYOUT) key, (VALUEOUT) value);
            } else {
                impl.write(key, value);
            }
        }

        @Override
        public OutputCommitter getOutputCommitter() {
            return impl.getOutputCommitter();
        }

        // Declared by some Hadoop context versions; the @Override is commented
        // out so this also compiles against versions lacking the method.
        //@Override
        public boolean userClassesTakesPrecedence() {
            return true;
        }

        @Override
        public TaskAttemptID getTaskAttemptID() {
            return impl.getTaskAttemptID();
        }

        @Override
        public String getStatus() {
            return impl.getStatus();
        }

        @Override
        public Counter getCounter(Enum<?> counterName) {
            return impl.getCounter(counterName);
        }

        @Override
        public Counter getCounter(String groupName, String counterName) {
            return impl.getCounter(groupName, counterName);
        }

        @Override
        public void progress() {
            // Deliberately a no-op: progress reporting is not delegated.
            //impl.progress();
        }

        @Override
        public void setStatus(String status) {
            impl.setStatus(status);
        }

        // @Override
        // public float getProgress() {
        //     return impl.getProgress();
        // }

        // --- The remaining methods are plain delegation to ReduceContextImpl. ---

        @Override
        public Configuration getConfiguration() {
            return impl.getConfiguration();
        }

        @Override
        public JobID getJobID() {
            return impl.getJobID();
        }

        @Override
        public int getNumReduceTasks() {
            return impl.getNumReduceTasks();
        }

        @Override
        public Path getWorkingDirectory() throws IOException {
            return impl.getWorkingDirectory();
        }

        @Override
        public Class<?> getOutputKeyClass() {
            return impl.getOutputKeyClass();
        }

        @Override
        public Class<?> getOutputValueClass() {
            return impl.getOutputValueClass();
        }

        @Override
        public Class<?> getMapOutputKeyClass() {
            return impl.getMapOutputKeyClass();
        }

        @Override
        public Class<?> getMapOutputValueClass() {
            return impl.getMapOutputValueClass();
        }

        @Override
        public String getJobName() {
            return impl.getJobName();
        }

        @Override
        public Class<? extends InputFormat<?, ?>> getInputFormatClass() throws ClassNotFoundException {
            return impl.getInputFormatClass();
        }

        @Override
        public Class<? extends Mapper<?, ?, ?, ?>> getMapperClass() throws ClassNotFoundException {
            return impl.getMapperClass();
        }

        @Override
        public Class<? extends Reducer<?, ?, ?, ?>> getCombinerClass() throws ClassNotFoundException {
            return impl.getCombinerClass();
        }

        @Override
        public Class<? extends Reducer<?, ?, ?, ?>> getReducerClass() throws ClassNotFoundException {
            return impl.getReducerClass();
        }

        @Override
        public Class<? extends OutputFormat<?, ?>> getOutputFormatClass() throws ClassNotFoundException {
            return impl.getOutputFormatClass();
        }

        @Override
        public Class<? extends Partitioner<?, ?>> getPartitionerClass() throws ClassNotFoundException {
            return impl.getPartitionerClass();
        }

        @Override
        public RawComparator<?> getSortComparator() {
            return impl.getSortComparator();
        }

        @Override
        public String getJar() {
            return impl.getJar();
        }

        @Override
        public RawComparator<?> getGroupingComparator() {
            return impl.getGroupingComparator();
        }

        @Override
        public boolean getJobSetupCleanupNeeded() {
            return impl.getJobSetupCleanupNeeded();
        }

        // @Override
        // public boolean getTaskCleanupNeeded() {
        //     return impl.getTaskCleanupNeeded();
        // }

        @Override
        public boolean getSymlink() {
            return impl.getSymlink();
        }

        @Override
        public Path[] getArchiveClassPaths() {
            return impl.getArchiveClassPaths();
        }

        @Override
        public URI[] getCacheArchives() throws IOException {
            return impl.getCacheArchives();
        }

        @Override
        public URI[] getCacheFiles() throws IOException {
            return impl.getCacheFiles();
        }

        @Override
        public Path[] getLocalCacheArchives() throws IOException {
            return impl.getLocalCacheArchives();
        }

        @Override
        public Path[] getLocalCacheFiles() throws IOException {
            return impl.getLocalCacheFiles();
        }

        @Override
        public Path[] getFileClassPaths() {
            return impl.getFileClassPaths();
        }

        @Override
        public String[] getArchiveTimestamps() {
            return impl.getArchiveTimestamps();
        }

        @Override
        public String[] getFileTimestamps() {
            return impl.getFileTimestamps();
        }

        @Override
        public int getMaxMapAttempts() {
            return impl.getMaxMapAttempts();
        }

        @Override
        public int getMaxReduceAttempts() {
            return impl.getMaxReduceAttempts();
        }

        @Override
        public boolean getProfileEnabled() {
            return impl.getProfileEnabled();
        }

        @Override
        public String getProfileParams() {
            return impl.getProfileParams();
        }

        // @Override
        // public Configuration.IntegerRanges getProfileTaskRange(boolean isMap) {
        //     return impl.getProfileTaskRange(isMap);
        // }

        @Override
        public String getUser() {
            return impl.getUser();
        }

        @Override
        public Credentials getCredentials() {
            return impl.getCredentials();
        }

        /** Grouped values for the current key come from the transport, not the delegate. */
        @Override
        @SuppressWarnings("unchecked")
        public Iterable getValues() throws IOException, InterruptedException {
            return transport != null ? transport.getValue() : null;
        }

        @Override
        public RawComparator<?> getCombinerKeyGroupingComparator() {
            return impl.getCombinerKeyGroupingComparator();
        }

        @Override
        public float getProgress() {
            return impl.getProgress();
        }

        @Override
        public boolean getTaskCleanupNeeded() {
            return impl.getTaskCleanupNeeded();
        }

        @Override
        public Configuration.IntegerRanges getProfileTaskRange(boolean b) {
            return impl.getProfileTaskRange(b);
        }

        // Updates the job id reported by the wrapped context.
        public void setJobID(JobID jobId) {
            impl.setJobID(jobId);
        }
    }
}
/**
 * Creates a Mapper.Context backed by the given reader/writer;
 * see {@link MapperContextHolder} for the wrapping details.
 */
@Override
public <INKEY, INVALUE, OUTKEY, OUTVALUE> Mapper<INKEY, INVALUE, OUTKEY, OUTVALUE>.Context getMapperContext(Configuration configuration, TaskAttemptID taskid, RecordReader reader, RecordWriter writer) throws IOException, InterruptedException {
    return new MapperContextHolder(configuration, taskid, reader, writer).getContext();
}
/**
* This class overrides mapper, to provide dummy context for user-defined mapper invocation.
*/
static class MapperContextHolder extends org.apache.hadoop.mapreduce.Mapper {
private HServerContext context;
@SuppressWarnings("unchecked")
public class HServerContext extends Context {
MapContextImpl impl;
/**
 * Builds the underlying MapContextImpl around the given reader/writer; the
 * split, committer and status-reporter positions unused on this code path are
 * passed as null (a DummyReporter stands in for the reporter).
 */
public HServerContext(Configuration configuration, TaskAttemptID taskid, RecordReader reader, RecordWriter writer) throws IOException, InterruptedException {
    impl = new MapContextImpl(configuration, taskid, reader, writer, null, new TaskAttemptContextImpl.DummyReporter(), null);
}

/** Updates the job id reported by the wrapped context. */
public void setJobID(JobID jobId) {
    impl.setJobID(jobId);
}
@Override
public InputSplit getInputSplit() {
    return impl.getInputSplit();
}

@Override
public Object getCurrentKey() throws IOException, InterruptedException {
    return impl.getCurrentKey();
}

// Declared by some Hadoop context versions; the @Override is commented out so
// this also compiles against versions lacking the method.
//@Override
public boolean userClassesTakesPrecedence() {
    return true;
}

@Override
public Object getCurrentValue() throws IOException, InterruptedException {
    return impl.getCurrentValue();
}

@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
    return impl.nextKeyValue();
}

@Override
public void write(Object key, Object value) throws IOException, InterruptedException {
    impl.write(key, value);
}

@Override
public OutputCommitter getOutputCommitter() {
    return impl.getOutputCommitter();
}
// --- All methods below are plain delegation to the wrapped MapContextImpl. ---

@Override
public TaskAttemptID getTaskAttemptID() {
    return impl.getTaskAttemptID();
}

@Override
public String getStatus() {
    return impl.getStatus();
}

@Override
public Counter getCounter(Enum<?> counterName) {
    return impl.getCounter(counterName);
}

@Override
public Counter getCounter(String groupName, String counterName) {
    return impl.getCounter(groupName, counterName);
}

@Override
public void progress() {
    impl.progress();
}

@Override
public void setStatus(String status) {
    impl.setStatus(status);
}

// @Override
// public float getProgress() {
//     return impl.getProgress();
// }

@Override
public Configuration getConfiguration() {
    return impl.getConfiguration();
}

@Override
public JobID getJobID() {
    return impl.getJobID();
}

@Override
public int getNumReduceTasks() {
    return impl.getNumReduceTasks();
}

@Override
public Path getWorkingDirectory() throws IOException {
    return impl.getWorkingDirectory();
}

@Override
public Class<?> getOutputKeyClass() {
    return impl.getOutputKeyClass();
}

@Override
public Class<?> getOutputValueClass() {
    return impl.getOutputValueClass();
}

@Override
public Class<?> getMapOutputKeyClass() {
    return impl.getMapOutputKeyClass();
}

@Override
public Class<?> getMapOutputValueClass() {
    return impl.getMapOutputValueClass();
}

@Override
public String getJobName() {
    return impl.getJobName();
}

@Override
public Class<? extends InputFormat<?, ?>> getInputFormatClass() throws ClassNotFoundException {
    return impl.getInputFormatClass();
}

@Override
public Class<? extends Mapper<?, ?, ?, ?>> getMapperClass() throws ClassNotFoundException {
    return impl.getMapperClass();
}

@Override
public Class<? extends Reducer<?, ?, ?, ?>> getCombinerClass() throws ClassNotFoundException {
    return impl.getCombinerClass();
}

@Override
public Class<? extends Reducer<?, ?, ?, ?>> getReducerClass() throws ClassNotFoundException {
    return impl.getReducerClass();
}

@Override
public Class<? extends OutputFormat<?, ?>> getOutputFormatClass() throws ClassNotFoundException {
    return impl.getOutputFormatClass();
}

@Override
public Class<? extends Partitioner<?, ?>> getPartitionerClass() throws ClassNotFoundException {
    return impl.getPartitionerClass();
}

@Override
public RawComparator<?> getSortComparator() {
    return impl.getSortComparator();
}

@Override
public String getJar() {
    return impl.getJar();
}

@Override
public RawComparator<?> getGroupingComparator() {
    return impl.getGroupingComparator();
}

@Override
public boolean getJobSetupCleanupNeeded() {
    return impl.getJobSetupCleanupNeeded();
}

// @Override
// public boolean getTaskCleanupNeeded() {
//     return impl.getTaskCleanupNeeded();
// }

@Override
public boolean getSymlink() {
    return impl.getSymlink();
}

@Override
public Path[] getArchiveClassPaths() {
    return impl.getArchiveClassPaths();
}

@Override
public URI[] getCacheArchives() throws IOException {
    return impl.getCacheArchives();
}

@Override
public URI[] getCacheFiles() throws IOException {
    return impl.getCacheFiles();
}

@Override
public Path[] getLocalCacheArchives() throws IOException {
    return impl.getLocalCacheArchives();
}

@Override
public Path[] getLocalCacheFiles() throws IOException {
    return impl.getLocalCacheFiles();
}

@Override
public Path[] getFileClassPaths() {
    return impl.getFileClassPaths();
}

@Override
public String[] getArchiveTimestamps() {
    return impl.getArchiveTimestamps();
}

@Override
public String[] getFileTimestamps() {
    return impl.getFileTimestamps();
}

@Override
public int getMaxMapAttempts() {
    return impl.getMaxMapAttempts();
}

@Override
public int getMaxReduceAttempts() {
    return impl.getMaxReduceAttempts();
}

@Override
public boolean getProfileEnabled() {
    return impl.getProfileEnabled();
}

@Override
public String getProfileParams() {
    return impl.getProfileParams();
}
// @Override
// public Configuration.IntegerRanges getProfileTaskRange(boolean isMap) {
// return impl.getProfileTaskRange(isMap);
// }
//@Override
public String getUser() {
return impl.getUser();
}
@Override
public Credentials getCredentials() {
return impl.getCredentials();
}
@Override
public RawComparator<?> getCombinerKeyGroupingComparator() {
return impl.getCombinerKeyGroupingComparator();
}
@Override
public float getProgress() {
return impl.getProgress();
}
@Override
public boolean getTaskCleanupNeeded() {
return impl.getTaskCleanupNeeded();
}
@Override
public Configuration.IntegerRanges getProfileTaskRange(boolean b) {
return impl.getProfileTaskRange(b);
}
}
/**
 * Builds the holder by wrapping the given job pieces in an HServerContext.
 *
 * @param configuration job configuration passed through to the context
 * @param taskid        the task attempt this context belongs to
 * @param reader        record reader supplying map input (raw type as declared)
 * @param writer        record writer receiving map output (raw type as declared)
 * @throws IOException          if the underlying context construction fails
 * @throws InterruptedException if context construction is interrupted
 */
public MapperContextHolder(Configuration configuration, TaskAttemptID taskid, RecordReader reader, RecordWriter writer) throws IOException, InterruptedException {
    context = new HServerContext(configuration, taskid, reader, writer);
}
/** Returns the wrapped context created by the constructor. */
public HServerContext getContext() {
    return context;
}
}
//----------------------------MAPRED------------------
@Override
public org.apache.hadoop.mapred.TaskAttemptContext createTaskAttemptContextMapred(JobConf jobConf, org.apache.hadoop.mapred.TaskAttemptID taskId) {
    try {
        // TaskAttemptContextImpl's (JobConf, TaskAttemptID) constructor is not
        // public in this Hadoop version, so it is invoked reflectively.
        Constructor<org.apache.hadoop.mapred.TaskAttemptContextImpl> contextConstructor = org.apache.hadoop.mapred.TaskAttemptContextImpl.class.getDeclaredConstructor(JobConf.class, org.apache.hadoop.mapred.TaskAttemptID.class);
        contextConstructor.setAccessible(true);
        return contextConstructor.newInstance(jobConf, taskId);
    } catch (Exception e) {
        // Boundary wrap: any reflective failure (including SecurityException
        // from setAccessible) surfaces as an unchecked error with the cause.
        throw new RuntimeException("Cannot instantiate TaskAttemptContext.", e);
    }
}
//------------------INITIALIZE AND TEARDOWN
// One distributed-cache manager per running job; concurrent map because jobs
// may initialize/finish on different threads.
private static ConcurrentHashMap<JobID, DistributedCacheManager> distributedCaches = new ConcurrentHashMap<JobID, DistributedCacheManager>();
@Override
public void onJobInitialize(InvocationParameters parameters) throws IOException {
    //Take this chance to stub out ResourceCalculatorProcessTree
    parameters.getHadoopInvocationParameters().getConfiguration().setClass(MRConfig.RESOURCE_CALCULATOR_PROCESS_TREE, DummyResourceCalculatorProcessTree.class, ResourceCalculatorProcessTree.class);
    //Initialize the distributed cache
    DistributedCacheManager cacheManager = new DistributedCacheManager();
    cacheManager.setup(parameters.getHadoopInvocationParameters().getConfiguration());
    // Remember the manager so onJobDone can close it for this job.
    distributedCaches.put(parameters.getHadoopInvocationParameters().getJobID(), cacheManager);
    super.onJobInitialize(parameters);
}
@Override
public void onJobDone(InvocationParameters parameters) throws IOException {
    // Atomically detach this job's cache manager. The previous get()-then-
    // remove() sequence was a check-then-act race: two concurrent onJobDone
    // calls for the same job could both see the manager and close() it twice.
    DistributedCacheManager cacheManager = distributedCaches.remove(parameters.getJobId());
    if (cacheManager != null) {
        cacheManager.close();
    }
    super.onJobDone(parameters);
}
}
| |
/*
* Copyright 2017 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apicurio.hub.api.bitbucket;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Properties;
import org.apache.commons.codec.binary.Base64;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.mashape.unirest.http.exceptions.UnirestException;
import io.apicurio.hub.api.beans.BitbucketRepository;
import io.apicurio.hub.api.beans.BitbucketTeam;
import io.apicurio.hub.api.beans.ResourceContent;
import io.apicurio.hub.api.beans.SourceCodeBranch;
import io.apicurio.hub.api.connectors.SourceConnectorException;
import io.apicurio.hub.core.beans.ApiDesignResourceInfo;
import io.apicurio.hub.core.config.HubConfiguration;
import io.apicurio.hub.core.exceptions.ApiValidationException;
import io.apicurio.hub.core.exceptions.NotFoundException;
import io.apicurio.test.core.TestUtil;
import test.io.apicurio.hub.api.MockSecurityContext;
/**
* @author eric.wittmann@gmail.com
*/
public class BitbucketSourceConnectorTest {

    // Connector under test; rebuilt for every test by setUp().
    private BitbucketSourceConnector service;
    private HubConfiguration config;
    private BitbucketResourceResolver resolver;
    // Base64("username:password") loaded once from the local .bitbucket file,
    // or null when no credentials file exists.
    private static String basicAuth = null;

    /**
     * Loads Bitbucket credentials, if available, from a ".bitbucket" Java
     * properties file (keys: username, password) in the working directory.
     * When the file is absent, tests needing credentials fail fast later in
     * getExternalToken().
     */
    @BeforeClass
    public static void globalSetUp() {
        File credsFile = new File(".bitbucket");
        if (!credsFile.isFile()) {
            return;
        }
        System.out.println("Loading Bitbucket credentials from: " + credsFile.getAbsolutePath());
        try (Reader reader = new FileReader(credsFile)) {
            Properties props = new Properties();
            props.load(reader);
            String userPass = props.getProperty("username") + ":" + props.getProperty("password");
            basicAuth = Base64.encodeBase64String(userPass.getBytes(StandardCharsets.UTF_8));
        } catch (IOException e) {
            // Rethrow with the cause attached; the previous printStackTrace()
            // call here only duplicated the same report on stderr.
            throw new RuntimeException(e);
        }
    }

    /** Wires up the connector with a test security context and config. */
    @Before
    public void setUp() {
        service = new BitbucketSourceConnector() {
            @Override
            protected String getExternalToken() throws SourceConnectorException {
                if (basicAuth == null) {
                    File credsFile = new File(".bitbucket");
                    throw new SourceConnectorException("Missing Bitbucket credentials.  Expected a Java properties file with Bitbucket 'username' and 'password' (personal or App password) located here: " + credsFile.getAbsolutePath());
                }
                return basicAuth;
            }
            /**
             * @see BitbucketSourceConnector#getExternalTokenType()
             */
            @Override
            protected Object getExternalTokenType() {
                return BitbucketSourceConnector.TOKEN_TYPE_BASIC;
            }
        };
        config = new HubConfiguration();
        resolver = new BitbucketResourceResolver();
        TestUtil.setPrivateField(resolver, "config", config);
        resolver.postConstruct();
        TestUtil.setPrivateField(service, "security", new MockSecurityContext());
        TestUtil.setPrivateField(service, "config", config);
        TestUtil.setPrivateField(service, "resolver", resolver);
    }

    @After
    public void tearDown() throws Exception {
    }

    /** Lists the caller's Bitbucket teams (live call; ignored by default). */
    @Test
    @Ignore
    public void testGetTeams() throws SourceConnectorException, BitbucketException {
        Collection<BitbucketTeam> teams = service.getTeams();
        Assert.assertNotNull(teams);
        Assert.assertFalse(teams.isEmpty());
        teams.forEach(team -> {
            System.out.println("Found team: " + team.getDisplayName() + " -- " + team.getUsername());
        });
    }

    /** Lists repositories for a team, exercising the repository filter. */
    @Test
    @Ignore
    public void testGetRepositories() throws SourceConnectorException, BitbucketException {
        String team = "apicurio";
        System.setProperty("apicurio.repository.filter", "oai");
        Collection<BitbucketRepository> repos = service.getRepositories(team);
        Assert.assertNotNull(repos);
        Assert.assertFalse(repos.isEmpty());
        repos.forEach( repo -> {
            System.out.println("Found repository: " + repo.getName() + " -- " + repo.getSlug());
        });
    }

    /** Lists branches of a known test repository. */
    @Test
    @Ignore
    public void testGetBranches() throws SourceConnectorException, BitbucketException {
        String team = "apicurio";
        String repo = "apicurio-test";
        Collection<SourceCodeBranch> branches = service.getBranches(team, repo);
        Assert.assertNotNull(branches);
        Assert.assertFalse(branches.isEmpty());
        branches.forEach( branch -> {
            System.out.println("Found branch: " + branch.getName() + " -- " + branch.getCommitId());
        });
    }

    /** Fetches file content + SHA for a pinned-commit Bitbucket source URL. */
    @Test
    @Ignore
    public void testGetResourceContent() throws SourceConnectorException, BitbucketException, NotFoundException {
        String url = "https://bitbucket.org/apicurio/apicurio-test/src/46163f44a4a398e0101ee9ff10affbbf57e066f9/apis/pet-store.json?at=master&fileviewer=file-view-default";
        ResourceContent content = service.getResourceContent(url);
        Assert.assertNotNull(content);
        Assert.assertNotNull(content.getSha());
        Assert.assertNotNull(content.getContent());
    }

    /**
     * Validates both commit-pinned and branch-style URLs resolve, and that a
     * missing resource raises NotFoundException.
     */
    @Test
    @Ignore
    public void testValidateResourceExists() throws SourceConnectorException, BitbucketException, NotFoundException, ApiValidationException {
        String url = "https://bitbucket.org/apicurio/apicurio-test/src/46163f44a4a398e0101ee9ff10affbbf57e066f9/apis/pet-store.json?at=master&fileviewer=file-view-default";
        ApiDesignResourceInfo info = service.validateResourceExists(url);
        Assert.assertNotNull(info);
        url = "https://bitbucket.org/apicurio/apicurio-test/src/master/apis/pet-store.json";
        info = service.validateResourceExists(url);
        Assert.assertNotNull(info);
        url = "https://bitbucket.org/apicurio/apicurio-test/src/master/apis/NOT-FOUND.json";
        try {
            service.validateResourceExists(url);
            Assert.fail("Expected a NotFoundException!");
        } catch (NotFoundException e) {
            // This is what we expect
        }
    }

    /** Creates a new file, then verifies creating an existing one errors. */
    @Test
    @Ignore
    public void testCreateResourceContent() throws SourceConnectorException, BitbucketException, NotFoundException, UnirestException {
        String url = "https://bitbucket.org/apicurio/apicurio-test/src/master/junit-apis/api-" + System.currentTimeMillis() + ".json";
        String content = "{\"swagger\":\"2.0\",\"info\":{\"title\":\"hello\",\"description\":\"hello\",\"version\":\"1.0.0\"}}";
        service.createResourceContent(url, "testing new message commit message for all", content);
        // Already exists - should error out.
        url = "https://bitbucket.org/apicurio/apicurio-test/src/master/junit-apis/already-exists.json";
        content = "{\"swagger\":\"2.0\",\"info\":{\"title\":\"hello\",\"description\":\"hello\",\"version\":\"1.0.0\"}}";
        try {
            service.createResourceContent(url, "testing new message commit message for all", content);
        } catch (SourceConnectorException e) {
            // Expected this
            Assert.assertTrue(e.getMessage().contains("already exists"));
        }
    }

    /** Round-trips content: read, bump the version field, write back. */
    @Test
    @Ignore
    public void testUpdateResourceContent() throws SourceConnectorException, BitbucketException, NotFoundException,
            UnirestException, JsonProcessingException, IOException {
        String repositoryUrl = "https://bitbucket.org/apicurio/apicurio-test/src/1b684236c6434bc5c6644cbf62c46bbd8d40f3d1/junit-apis/test-update-content.json?at=master&fileviewer=file-view-default";
        ResourceContent content = service.getResourceContent(repositoryUrl);
        Assert.assertTrue(content.getContent().contains("swagger"));
        ObjectMapper mapper = new ObjectMapper();
        JsonNode root = mapper.readTree(content.getContent());
        ObjectNode info = (ObjectNode) root.get("info");
        long newVersion = System.currentTimeMillis();
        info.set("version", TextNode.valueOf(String.valueOf(newVersion)));
        System.out.println("Setting new version to: " + newVersion);
        String newContent = mapper.writeValueAsString(root);
        content.setContent(newContent);
        String newSha = service.updateResourceContent(repositoryUrl, "Unit Test: Update Content", "Updated the version of: " + repositoryUrl, content);
        System.out.println("New SHA: " + newSha);
    }
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.codepipeline;
import com.amazonaws.services.codepipeline.model.*;
/**
* Interface for accessing CodePipeline asynchronously. Each asynchronous method
* will return a Java Future object representing the asynchronous operation;
* overloads which accept an {@code AsyncHandler} can be used to receive
* notification when an asynchronous operation completes.
* <p>
* <fullname>AWS CodePipeline</fullname> <b>Overview</b>
* <p>
* This is the AWS CodePipeline API Reference. This guide provides descriptions
* of the actions and data types for AWS CodePipeline. Some functionality for
* your pipeline is only configurable through the API. For additional
* information, see the <a
* href="http://docs.aws.amazon.com/pipelines/latest/userguide/welcome.html">AWS
* CodePipeline User Guide</a>.
* </p>
* <p>
* You can use the AWS CodePipeline API to work with pipelines, stages, actions,
* gates, and transitions, as described below.
* </p>
* <p>
* <i>Pipelines</i> are models of automated release processes. Each pipeline is
* uniquely named, and consists of actions, gates, and stages.
* </p>
* <p>
* You can work with pipelines by calling:
* <ul>
* <li><a>CreatePipeline</a>, which creates a uniquely-named pipeline.</li>
* <li><a>DeletePipeline</a>, which deletes the specified pipeline.</li>
* <li><a>GetPipeline</a>, which returns information about a pipeline structure.
* </li>
* <li><a>GetPipelineState</a>, which returns information about the current
* state of the stages and actions of a pipeline.</li>
* <li><a>ListPipelines</a>, which gets a summary of all of the pipelines
* associated with your account.</li>
 * <li><a>StartPipelineExecution</a>, which runs the most recent revision of
 * an artifact through the pipeline.</li>
* <li><a>UpdatePipeline</a>, which updates a pipeline with edits or changes to
* the structure of the pipeline.</li>
* </ul>
* <p>
 * Pipelines include <i>stages</i>, which are logical groupings of
 * gates and actions. Each stage contains one or more actions that must complete
* before the next stage begins. A stage will result in success or failure. If a
* stage fails, then the pipeline stops at that stage and will remain stopped
* until either a new version of an artifact appears in the source location, or
* a user takes action to re-run the most recent artifact through the pipeline.
* You can call <a>GetPipelineState</a>, which displays the status of a
* pipeline, including the status of stages in the pipeline, or
* <a>GetPipeline</a>, which returns the entire structure of the pipeline,
* including the stages of that pipeline.
 * </p>
* <p>
* Pipeline stages include <i>actions</i>, which are categorized into categories
* such as source or build actions performed within a stage of a pipeline. For
* example, you can use a source action to import artifacts into a pipeline from
* a source such as Amazon S3. Like stages, you do not work with actions
* directly in most cases, but you do define and interact with actions when
* working with pipeline operations such as <a>CreatePipeline</a> and
* <a>GetPipelineState</a>.
* </p>
* <p>
* Pipelines also include <i>transitions</i>, which allow the transition of
* artifacts from one stage to the next in a pipeline after the actions in one
* stage complete.
* </p>
* <p>
* You can work with transitions by calling:
* </p>
* <ul>
* <li><a>DisableStageTransition</a>, which prevents artifacts from
* transitioning to the next stage in a pipeline.</li>
* <li><a>EnableStageTransition</a>, which enables transition of artifacts
* between stages in a pipeline.</li>
* </ul>
* <p>
* <b>Using the API to integrate with AWS CodePipeline</b>
* </p>
* <p>
* For third-party integrators or developers who want to create their own
* integrations with AWS CodePipeline, the expected sequence varies from the
* standard API user. In order to integrate with AWS CodePipeline, developers
* will need to work with the following items:
* </p>
* <ul>
* <li>Jobs, which are instances of an action. For example, a job for a source
* action might import a revision of an artifact from a source.
* <p>
* You can work with jobs by calling:
* </p>
* <ul>
* <li><a>AcknowledgeJob</a>, which confirms whether a job worker has received
* the specified job,</li>
* <li><a>PollForJobs</a>, which determines whether there are any jobs to act
* upon,</li>
* <li><a>PutJobFailureResult</a>, which provides details of a job failure, and</li>
* <li><a>PutJobSuccessResult</a>, which provides details of a job success.</li>
* </ul>
* </li>
* <li>Third party jobs, which are instances of an action created by a partner
* action and integrated into AWS CodePipeline. Partner actions are created by
* members of the AWS Partner Network.
* <p>
* You can work with third party jobs by calling:
* </p>
* <ul>
* <li><a>AcknowledgeThirdPartyJob</a>, which confirms whether a job worker has
* received the specified job,</li>
* <li><a>PollForThirdPartyJobs</a>, which determines whether there are any jobs
* to act upon,</li>
* <li><a>PutThirdPartyJobFailureResult</a>, which provides details of a job
* failure, and</li>
* <li><a>PutThirdPartyJobSuccessResult</a>, which provides details of a job
* success.</li>
* </ul>
* </li>
* </ul>
*/
public class AWSCodePipelineAsyncClient extends AWSCodePipelineClient implements
AWSCodePipelineAsync {
// Matches the default maximum number of concurrent connections to the service.
private static final int DEFAULT_THREAD_POOL_SIZE = 50;
// Runs all asynchronous requests; terminated by shutdown().
private final java.util.concurrent.ExecutorService executorService;
/**
* Constructs a new asynchronous client to invoke service methods on
* CodePipeline. A credentials provider chain will be used that searches for
* credentials in this order:
* <ul>
* <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
* <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
* <li>Credential profiles file at the default location (~/.aws/credentials)
* shared by all AWS SDKs and the AWS CLI</li>
* <li>Instance profile credentials delivered through the Amazon EC2
* metadata service</li>
* </ul>
* <p>
* Asynchronous methods are delegated to a fixed-size thread pool containing
* 50 threads (to match the default maximum number of concurrent connections
* to the service).
*
* @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
* @see java.util.concurrent.Executors#newFixedThreadPool(int)
*/
public AWSCodePipelineAsyncClient() {
    // Delegate to the provider-based constructor using the default chain.
    this(new com.amazonaws.auth.DefaultAWSCredentialsProviderChain());
}
/**
* Constructs a new asynchronous client to invoke service methods on
* CodePipeline. A credentials provider chain will be used that searches for
* credentials in this order:
* <ul>
* <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li>
* <li>Java System Properties - aws.accessKeyId and aws.secretKey</li>
* <li>Credential profiles file at the default location (~/.aws/credentials)
* shared by all AWS SDKs and the AWS CLI</li>
* <li>Instance profile credentials delivered through the Amazon EC2
* metadata service</li>
* </ul>
* <p>
* Asynchronous methods are delegated to a fixed-size thread pool containing
* a number of threads equal to the maximum number of concurrent connections
* configured via {@code ClientConfiguration.getMaxConnections()}.
*
* @param clientConfiguration
* The client configuration options controlling how this client
* connects to CodePipeline (ex: proxy settings, retry counts, etc).
*
* @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
* @see java.util.concurrent.Executors#newFixedThreadPool(int)
*/
public AWSCodePipelineAsyncClient(
        com.amazonaws.ClientConfiguration clientConfiguration) {
    // Pool size is tied to the configured max connections so every worker
    // thread can hold a connection without queueing on the HTTP layer.
    this(new com.amazonaws.auth.DefaultAWSCredentialsProviderChain(),
            clientConfiguration, java.util.concurrent.Executors
                    .newFixedThreadPool(clientConfiguration
                            .getMaxConnections()));
}
/**
* Constructs a new asynchronous client to invoke service methods on
* CodePipeline using the specified AWS account credentials.
* <p>
* Asynchronous methods are delegated to a fixed-size thread pool containing
* 50 threads (to match the default maximum number of concurrent connections
* to the service).
*
* @param awsCredentials
* The AWS credentials (access key ID and secret key) to use when
* authenticating with AWS services.
* @see java.util.concurrent.Executors#newFixedThreadPool(int)
*/
public AWSCodePipelineAsyncClient(
        com.amazonaws.auth.AWSCredentials awsCredentials) {
    // Default pool size (50) mirrors the default max concurrent connections.
    this(awsCredentials, java.util.concurrent.Executors
            .newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
}
/**
* Constructs a new asynchronous client to invoke service methods on
* CodePipeline using the specified AWS account credentials and executor
* service. Default client settings will be used.
*
* @param awsCredentials
* The AWS credentials (access key ID and secret key) to use when
* authenticating with AWS services.
* @param executorService
* The executor service by which all asynchronous requests will be
* executed.
*/
public AWSCodePipelineAsyncClient(
        com.amazonaws.auth.AWSCredentials awsCredentials,
        java.util.concurrent.ExecutorService executorService) {
    // Caller-supplied executor with default client configuration.
    this(awsCredentials, new com.amazonaws.ClientConfiguration(),
            executorService);
}
/**
* Constructs a new asynchronous client to invoke service methods on
* CodePipeline using the specified AWS account credentials, executor
* service, and client configuration options.
*
* @param awsCredentials
* The AWS credentials (access key ID and secret key) to use when
* authenticating with AWS services.
* @param clientConfiguration
* Client configuration options (ex: max retry limit, proxy settings,
* etc).
* @param executorService
* The executor service by which all asynchronous requests will be
* executed.
*/
public AWSCodePipelineAsyncClient(
        com.amazonaws.auth.AWSCredentials awsCredentials,
        com.amazonaws.ClientConfiguration clientConfiguration,
        java.util.concurrent.ExecutorService executorService) {
    // Terminal constructor for the AWSCredentials overloads: initializes the
    // synchronous superclass and keeps the executor for async submission.
    super(awsCredentials, clientConfiguration);
    this.executorService = executorService;
}
/**
* Constructs a new asynchronous client to invoke service methods on
* CodePipeline using the specified AWS account credentials provider.
* Default client settings will be used.
* <p>
* Asynchronous methods are delegated to a fixed-size thread pool containing
* 50 threads (to match the default maximum number of concurrent connections
* to the service).
*
* @param awsCredentialsProvider
* The AWS credentials provider which will provide credentials to
* authenticate requests with AWS services.
* @see java.util.concurrent.Executors#newFixedThreadPool(int)
*/
public AWSCodePipelineAsyncClient(
        com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider) {
    // Default pool size (50) mirrors the default max concurrent connections.
    this(awsCredentialsProvider, java.util.concurrent.Executors
            .newFixedThreadPool(DEFAULT_THREAD_POOL_SIZE));
}
/**
* Constructs a new asynchronous client to invoke service methods on
* CodePipeline using the provided AWS account credentials provider and
* client configuration options.
* <p>
* Asynchronous methods are delegated to a fixed-size thread pool containing
* a number of threads equal to the maximum number of concurrent connections
* configured via {@code ClientConfiguration.getMaxConnections()}.
*
* @param awsCredentialsProvider
* The AWS credentials provider which will provide credentials to
* authenticate requests with AWS services.
* @param clientConfiguration
* Client configuration options (ex: max retry limit, proxy settings,
* etc).
*
* @see com.amazonaws.auth.DefaultAWSCredentialsProviderChain
* @see java.util.concurrent.Executors#newFixedThreadPool(int)
*/
public AWSCodePipelineAsyncClient(
        com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
        com.amazonaws.ClientConfiguration clientConfiguration) {
    // Pool size follows the configured max connections (see class docs).
    this(awsCredentialsProvider, clientConfiguration,
            java.util.concurrent.Executors
                    .newFixedThreadPool(clientConfiguration
                            .getMaxConnections()));
}
/**
* Constructs a new asynchronous client to invoke service methods on
* CodePipeline using the specified AWS account credentials provider and
* executor service. Default client settings will be used.
*
* @param awsCredentialsProvider
* The AWS credentials provider which will provide credentials to
* authenticate requests with AWS services.
* @param executorService
* The executor service by which all asynchronous requests will be
* executed.
*/
public AWSCodePipelineAsyncClient(
        com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
        java.util.concurrent.ExecutorService executorService) {
    // Caller-supplied executor with default client configuration.
    this(awsCredentialsProvider, new com.amazonaws.ClientConfiguration(),
            executorService);
}
/**
* Constructs a new asynchronous client to invoke service methods on
* CodePipeline using the specified AWS account credentials provider,
* executor service, and client configuration options.
*
* @param awsCredentialsProvider
* The AWS credentials provider which will provide credentials to
* authenticate requests with AWS services.
* @param clientConfiguration
* Client configuration options (ex: max retry limit, proxy settings,
* etc).
* @param executorService
* The executor service by which all asynchronous requests will be
* executed.
*/
public AWSCodePipelineAsyncClient(
        com.amazonaws.auth.AWSCredentialsProvider awsCredentialsProvider,
        com.amazonaws.ClientConfiguration clientConfiguration,
        java.util.concurrent.ExecutorService executorService) {
    // Terminal constructor for the provider overloads: initializes the
    // synchronous superclass and keeps the executor for async submission.
    super(awsCredentialsProvider, clientConfiguration);
    this.executorService = executorService;
}
/**
* Returns the executor service used by this client to execute async
* requests.
*
* @return The executor service used by this client to execute async
* requests.
*/
public java.util.concurrent.ExecutorService getExecutorService() {
    // Exposed so callers can drain pending work before shutdown() (see below).
    return executorService;
}
/**
* Shuts down the client, releasing all managed resources. This includes
* forcibly terminating all pending asynchronous service calls. Clients who
* wish to give pending asynchronous service calls time to complete should
* call {@code getExecutorService().shutdown()} followed by
* {@code getExecutorService().awaitTermination()} prior to calling this
* method.
*/
@Override
public void shutdown() {
    super.shutdown();
    // shutdownNow() interrupts in-flight tasks; callers wanting a graceful
    // drain should await termination on getExecutorService() first.
    executorService.shutdownNow();
}
@Override
public java.util.concurrent.Future<AcknowledgeJobResult> acknowledgeJobAsync(
        final AcknowledgeJobRequest acknowledgeJobRequest) {
    // Wrap the blocking acknowledgeJob call and hand it to the client's pool.
    final java.util.concurrent.Callable<AcknowledgeJobResult> task =
            new java.util.concurrent.Callable<AcknowledgeJobResult>() {
                @Override
                public AcknowledgeJobResult call() {
                    return acknowledgeJob(acknowledgeJobRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<AcknowledgeJobResult> acknowledgeJobAsync(
        final AcknowledgeJobRequest acknowledgeJobRequest,
        final com.amazonaws.handlers.AsyncHandler<AcknowledgeJobRequest, AcknowledgeJobResult> asyncHandler) {
    return executorService
            .submit(new java.util.concurrent.Callable<AcknowledgeJobResult>() {
                @Override
                public AcknowledgeJobResult call() throws Exception {
                    AcknowledgeJobResult result;
                    try {
                        result = acknowledgeJob(acknowledgeJobRequest);
                    } catch (Exception ex) {
                        // Notify the handler, then rethrow so the Future also
                        // reports the failure.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    // onSuccess fires before the Future completes with result.
                    asyncHandler.onSuccess(acknowledgeJobRequest, result);
                    return result;
                }
            });
}
@Override
public java.util.concurrent.Future<AcknowledgeThirdPartyJobResult> acknowledgeThirdPartyJobAsync(
        final AcknowledgeThirdPartyJobRequest acknowledgeThirdPartyJobRequest) {
    // Wrap the blocking call and hand it to the client's pool.
    final java.util.concurrent.Callable<AcknowledgeThirdPartyJobResult> task =
            new java.util.concurrent.Callable<AcknowledgeThirdPartyJobResult>() {
                @Override
                public AcknowledgeThirdPartyJobResult call() {
                    return acknowledgeThirdPartyJob(acknowledgeThirdPartyJobRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<AcknowledgeThirdPartyJobResult> acknowledgeThirdPartyJobAsync(
        final AcknowledgeThirdPartyJobRequest acknowledgeThirdPartyJobRequest,
        final com.amazonaws.handlers.AsyncHandler<AcknowledgeThirdPartyJobRequest, AcknowledgeThirdPartyJobResult> asyncHandler) {
    return executorService
            .submit(new java.util.concurrent.Callable<AcknowledgeThirdPartyJobResult>() {
                @Override
                public AcknowledgeThirdPartyJobResult call()
                        throws Exception {
                    AcknowledgeThirdPartyJobResult result;
                    try {
                        result = acknowledgeThirdPartyJob(acknowledgeThirdPartyJobRequest);
                    } catch (Exception ex) {
                        // Notify the handler, then rethrow so the Future also
                        // reports the failure.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    // onSuccess fires before the Future completes with result.
                    asyncHandler.onSuccess(acknowledgeThirdPartyJobRequest,
                            result);
                    return result;
                }
            });
}
@Override
public java.util.concurrent.Future<CreateCustomActionTypeResult> createCustomActionTypeAsync(
        final CreateCustomActionTypeRequest createCustomActionTypeRequest) {
    // Wrap the blocking call and hand it to the client's pool.
    final java.util.concurrent.Callable<CreateCustomActionTypeResult> task =
            new java.util.concurrent.Callable<CreateCustomActionTypeResult>() {
                @Override
                public CreateCustomActionTypeResult call() {
                    return createCustomActionType(createCustomActionTypeRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<CreateCustomActionTypeResult> createCustomActionTypeAsync(
        final CreateCustomActionTypeRequest createCustomActionTypeRequest,
        final com.amazonaws.handlers.AsyncHandler<CreateCustomActionTypeRequest, CreateCustomActionTypeResult> asyncHandler) {
    return executorService
            .submit(new java.util.concurrent.Callable<CreateCustomActionTypeResult>() {
                @Override
                public CreateCustomActionTypeResult call() throws Exception {
                    CreateCustomActionTypeResult result;
                    try {
                        result = createCustomActionType(createCustomActionTypeRequest);
                    } catch (Exception ex) {
                        // Notify the handler, then rethrow so the Future also
                        // reports the failure.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    // onSuccess fires before the Future completes with result.
                    asyncHandler.onSuccess(createCustomActionTypeRequest,
                            result);
                    return result;
                }
            });
}
@Override
public java.util.concurrent.Future<CreatePipelineResult> createPipelineAsync(
        final CreatePipelineRequest createPipelineRequest) {
    // Wrap the blocking call and hand it to the client's pool.
    final java.util.concurrent.Callable<CreatePipelineResult> task =
            new java.util.concurrent.Callable<CreatePipelineResult>() {
                @Override
                public CreatePipelineResult call() {
                    return createPipeline(createPipelineRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<CreatePipelineResult> createPipelineAsync(
        final CreatePipelineRequest createPipelineRequest,
        final com.amazonaws.handlers.AsyncHandler<CreatePipelineRequest, CreatePipelineResult> asyncHandler) {
    return executorService
            .submit(new java.util.concurrent.Callable<CreatePipelineResult>() {
                @Override
                public CreatePipelineResult call() throws Exception {
                    CreatePipelineResult result;
                    try {
                        result = createPipeline(createPipelineRequest);
                    } catch (Exception ex) {
                        // Notify the handler, then rethrow so the Future also
                        // reports the failure.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    // onSuccess fires before the Future completes with result.
                    asyncHandler.onSuccess(createPipelineRequest, result);
                    return result;
                }
            });
}
@Override
public java.util.concurrent.Future<Void> deleteCustomActionTypeAsync(
        final DeleteCustomActionTypeRequest deleteCustomActionTypeRequest) {
    // Wrap the blocking void call; the Future completes with null.
    final java.util.concurrent.Callable<Void> task =
            new java.util.concurrent.Callable<Void>() {
                @Override
                public Void call() {
                    deleteCustomActionType(deleteCustomActionTypeRequest);
                    return null;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> deleteCustomActionTypeAsync(
        final DeleteCustomActionTypeRequest deleteCustomActionTypeRequest,
        final com.amazonaws.handlers.AsyncHandler<DeleteCustomActionTypeRequest, Void> asyncHandler) {
    // Run the synchronous deleteCustomActionType call on the executor and
    // notify the supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() throws Exception {
            try {
                deleteCustomActionType(deleteCustomActionTypeRequest);
            } catch (Exception ex) {
                // Report the failure to the handler before propagating it to the Future.
                asyncHandler.onError(ex);
                throw ex;
            }
            asyncHandler.onSuccess(deleteCustomActionTypeRequest, null); // Void result is always null
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> deletePipelineAsync(
        final DeletePipelineRequest deletePipelineRequest) {
    // Execute the synchronous deletePipeline call asynchronously;
    // the returned Future yields null on success.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() {
            deletePipeline(deletePipelineRequest);
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> deletePipelineAsync(
        final DeletePipelineRequest deletePipelineRequest,
        final com.amazonaws.handlers.AsyncHandler<DeletePipelineRequest, Void> asyncHandler) {
    // Run the synchronous deletePipeline call on the executor and notify the
    // supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() throws Exception {
            try {
                deletePipeline(deletePipelineRequest);
            } catch (Exception ex) {
                // Report the failure to the handler before propagating it to the Future.
                asyncHandler.onError(ex);
                throw ex;
            }
            asyncHandler.onSuccess(deletePipelineRequest, null); // Void result is always null
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> disableStageTransitionAsync(
        final DisableStageTransitionRequest disableStageTransitionRequest) {
    // Execute the synchronous disableStageTransition call asynchronously;
    // the returned Future yields null on success.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() {
            disableStageTransition(disableStageTransitionRequest);
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> disableStageTransitionAsync(
        final DisableStageTransitionRequest disableStageTransitionRequest,
        final com.amazonaws.handlers.AsyncHandler<DisableStageTransitionRequest, Void> asyncHandler) {
    // Run the synchronous disableStageTransition call on the executor and
    // notify the supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() throws Exception {
            try {
                disableStageTransition(disableStageTransitionRequest);
            } catch (Exception ex) {
                // Report the failure to the handler before propagating it to the Future.
                asyncHandler.onError(ex);
                throw ex;
            }
            asyncHandler.onSuccess(disableStageTransitionRequest, null); // Void result is always null
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> enableStageTransitionAsync(
        final EnableStageTransitionRequest enableStageTransitionRequest) {
    // Execute the synchronous enableStageTransition call asynchronously;
    // the returned Future yields null on success.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() {
            enableStageTransition(enableStageTransitionRequest);
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> enableStageTransitionAsync(
        final EnableStageTransitionRequest enableStageTransitionRequest,
        final com.amazonaws.handlers.AsyncHandler<EnableStageTransitionRequest, Void> asyncHandler) {
    // Run the synchronous enableStageTransition call on the executor and
    // notify the supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() throws Exception {
            try {
                enableStageTransition(enableStageTransitionRequest);
            } catch (Exception ex) {
                // Report the failure to the handler before propagating it to the Future.
                asyncHandler.onError(ex);
                throw ex;
            }
            asyncHandler.onSuccess(enableStageTransitionRequest, null); // Void result is always null
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<GetJobDetailsResult> getJobDetailsAsync(
        final GetJobDetailsRequest getJobDetailsRequest) {
    // Execute the synchronous getJobDetails call asynchronously on the executor.
    java.util.concurrent.Callable<GetJobDetailsResult> task =
            new java.util.concurrent.Callable<GetJobDetailsResult>() {
                @Override
                public GetJobDetailsResult call() {
                    return getJobDetails(getJobDetailsRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<GetJobDetailsResult> getJobDetailsAsync(
        final GetJobDetailsRequest getJobDetailsRequest,
        final com.amazonaws.handlers.AsyncHandler<GetJobDetailsRequest, GetJobDetailsResult> asyncHandler) {
    // Run the synchronous getJobDetails call on the executor and notify the
    // supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<GetJobDetailsResult> task =
            new java.util.concurrent.Callable<GetJobDetailsResult>() {
                @Override
                public GetJobDetailsResult call() throws Exception {
                    final GetJobDetailsResult response;
                    try {
                        response = getJobDetails(getJobDetailsRequest);
                    } catch (Exception ex) {
                        // Report the failure to the handler before propagating it to the Future.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    asyncHandler.onSuccess(getJobDetailsRequest, response);
                    return response;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<GetPipelineResult> getPipelineAsync(
        final GetPipelineRequest getPipelineRequest) {
    // Execute the synchronous getPipeline call asynchronously on the executor.
    java.util.concurrent.Callable<GetPipelineResult> task =
            new java.util.concurrent.Callable<GetPipelineResult>() {
                @Override
                public GetPipelineResult call() {
                    return getPipeline(getPipelineRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<GetPipelineResult> getPipelineAsync(
        final GetPipelineRequest getPipelineRequest,
        final com.amazonaws.handlers.AsyncHandler<GetPipelineRequest, GetPipelineResult> asyncHandler) {
    // Run the synchronous getPipeline call on the executor and notify the
    // supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<GetPipelineResult> task =
            new java.util.concurrent.Callable<GetPipelineResult>() {
                @Override
                public GetPipelineResult call() throws Exception {
                    final GetPipelineResult response;
                    try {
                        response = getPipeline(getPipelineRequest);
                    } catch (Exception ex) {
                        // Report the failure to the handler before propagating it to the Future.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    asyncHandler.onSuccess(getPipelineRequest, response);
                    return response;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<GetPipelineStateResult> getPipelineStateAsync(
        final GetPipelineStateRequest getPipelineStateRequest) {
    // Execute the synchronous getPipelineState call asynchronously on the executor.
    java.util.concurrent.Callable<GetPipelineStateResult> task =
            new java.util.concurrent.Callable<GetPipelineStateResult>() {
                @Override
                public GetPipelineStateResult call() {
                    return getPipelineState(getPipelineStateRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<GetPipelineStateResult> getPipelineStateAsync(
        final GetPipelineStateRequest getPipelineStateRequest,
        final com.amazonaws.handlers.AsyncHandler<GetPipelineStateRequest, GetPipelineStateResult> asyncHandler) {
    // Run the synchronous getPipelineState call on the executor and notify the
    // supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<GetPipelineStateResult> task =
            new java.util.concurrent.Callable<GetPipelineStateResult>() {
                @Override
                public GetPipelineStateResult call() throws Exception {
                    final GetPipelineStateResult response;
                    try {
                        response = getPipelineState(getPipelineStateRequest);
                    } catch (Exception ex) {
                        // Report the failure to the handler before propagating it to the Future.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    asyncHandler.onSuccess(getPipelineStateRequest, response);
                    return response;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<GetThirdPartyJobDetailsResult> getThirdPartyJobDetailsAsync(
        final GetThirdPartyJobDetailsRequest getThirdPartyJobDetailsRequest) {
    // Execute the synchronous getThirdPartyJobDetails call asynchronously on the executor.
    java.util.concurrent.Callable<GetThirdPartyJobDetailsResult> task =
            new java.util.concurrent.Callable<GetThirdPartyJobDetailsResult>() {
                @Override
                public GetThirdPartyJobDetailsResult call() {
                    return getThirdPartyJobDetails(getThirdPartyJobDetailsRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<GetThirdPartyJobDetailsResult> getThirdPartyJobDetailsAsync(
        final GetThirdPartyJobDetailsRequest getThirdPartyJobDetailsRequest,
        final com.amazonaws.handlers.AsyncHandler<GetThirdPartyJobDetailsRequest, GetThirdPartyJobDetailsResult> asyncHandler) {
    // Run the synchronous getThirdPartyJobDetails call on the executor and
    // notify the supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<GetThirdPartyJobDetailsResult> task =
            new java.util.concurrent.Callable<GetThirdPartyJobDetailsResult>() {
                @Override
                public GetThirdPartyJobDetailsResult call() throws Exception {
                    final GetThirdPartyJobDetailsResult response;
                    try {
                        response = getThirdPartyJobDetails(getThirdPartyJobDetailsRequest);
                    } catch (Exception ex) {
                        // Report the failure to the handler before propagating it to the Future.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    asyncHandler.onSuccess(getThirdPartyJobDetailsRequest, response);
                    return response;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<ListActionTypesResult> listActionTypesAsync(
        final ListActionTypesRequest listActionTypesRequest) {
    // Execute the synchronous listActionTypes call asynchronously on the executor.
    java.util.concurrent.Callable<ListActionTypesResult> task =
            new java.util.concurrent.Callable<ListActionTypesResult>() {
                @Override
                public ListActionTypesResult call() {
                    return listActionTypes(listActionTypesRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<ListActionTypesResult> listActionTypesAsync(
        final ListActionTypesRequest listActionTypesRequest,
        final com.amazonaws.handlers.AsyncHandler<ListActionTypesRequest, ListActionTypesResult> asyncHandler) {
    // Run the synchronous listActionTypes call on the executor and notify the
    // supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<ListActionTypesResult> task =
            new java.util.concurrent.Callable<ListActionTypesResult>() {
                @Override
                public ListActionTypesResult call() throws Exception {
                    final ListActionTypesResult response;
                    try {
                        response = listActionTypes(listActionTypesRequest);
                    } catch (Exception ex) {
                        // Report the failure to the handler before propagating it to the Future.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    asyncHandler.onSuccess(listActionTypesRequest, response);
                    return response;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<ListPipelinesResult> listPipelinesAsync(
        final ListPipelinesRequest listPipelinesRequest) {
    // Execute the synchronous listPipelines call asynchronously on the executor.
    java.util.concurrent.Callable<ListPipelinesResult> task =
            new java.util.concurrent.Callable<ListPipelinesResult>() {
                @Override
                public ListPipelinesResult call() {
                    return listPipelines(listPipelinesRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<ListPipelinesResult> listPipelinesAsync(
        final ListPipelinesRequest listPipelinesRequest,
        final com.amazonaws.handlers.AsyncHandler<ListPipelinesRequest, ListPipelinesResult> asyncHandler) {
    // Run the synchronous listPipelines call on the executor and notify the
    // supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<ListPipelinesResult> task =
            new java.util.concurrent.Callable<ListPipelinesResult>() {
                @Override
                public ListPipelinesResult call() throws Exception {
                    final ListPipelinesResult response;
                    try {
                        response = listPipelines(listPipelinesRequest);
                    } catch (Exception ex) {
                        // Report the failure to the handler before propagating it to the Future.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    asyncHandler.onSuccess(listPipelinesRequest, response);
                    return response;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<PollForJobsResult> pollForJobsAsync(
        final PollForJobsRequest pollForJobsRequest) {
    // Execute the synchronous pollForJobs call asynchronously on the executor.
    java.util.concurrent.Callable<PollForJobsResult> task =
            new java.util.concurrent.Callable<PollForJobsResult>() {
                @Override
                public PollForJobsResult call() {
                    return pollForJobs(pollForJobsRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<PollForJobsResult> pollForJobsAsync(
        final PollForJobsRequest pollForJobsRequest,
        final com.amazonaws.handlers.AsyncHandler<PollForJobsRequest, PollForJobsResult> asyncHandler) {
    // Run the synchronous pollForJobs call on the executor and notify the
    // supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<PollForJobsResult> task =
            new java.util.concurrent.Callable<PollForJobsResult>() {
                @Override
                public PollForJobsResult call() throws Exception {
                    final PollForJobsResult response;
                    try {
                        response = pollForJobs(pollForJobsRequest);
                    } catch (Exception ex) {
                        // Report the failure to the handler before propagating it to the Future.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    asyncHandler.onSuccess(pollForJobsRequest, response);
                    return response;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<PollForThirdPartyJobsResult> pollForThirdPartyJobsAsync(
        final PollForThirdPartyJobsRequest pollForThirdPartyJobsRequest) {
    // Execute the synchronous pollForThirdPartyJobs call asynchronously on the executor.
    java.util.concurrent.Callable<PollForThirdPartyJobsResult> task =
            new java.util.concurrent.Callable<PollForThirdPartyJobsResult>() {
                @Override
                public PollForThirdPartyJobsResult call() {
                    return pollForThirdPartyJobs(pollForThirdPartyJobsRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<PollForThirdPartyJobsResult> pollForThirdPartyJobsAsync(
        final PollForThirdPartyJobsRequest pollForThirdPartyJobsRequest,
        final com.amazonaws.handlers.AsyncHandler<PollForThirdPartyJobsRequest, PollForThirdPartyJobsResult> asyncHandler) {
    // Run the synchronous pollForThirdPartyJobs call on the executor and
    // notify the supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<PollForThirdPartyJobsResult> task =
            new java.util.concurrent.Callable<PollForThirdPartyJobsResult>() {
                @Override
                public PollForThirdPartyJobsResult call() throws Exception {
                    final PollForThirdPartyJobsResult response;
                    try {
                        response = pollForThirdPartyJobs(pollForThirdPartyJobsRequest);
                    } catch (Exception ex) {
                        // Report the failure to the handler before propagating it to the Future.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    asyncHandler.onSuccess(pollForThirdPartyJobsRequest, response);
                    return response;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<PutActionRevisionResult> putActionRevisionAsync(
        final PutActionRevisionRequest putActionRevisionRequest) {
    // Execute the synchronous putActionRevision call asynchronously on the executor.
    java.util.concurrent.Callable<PutActionRevisionResult> task =
            new java.util.concurrent.Callable<PutActionRevisionResult>() {
                @Override
                public PutActionRevisionResult call() {
                    return putActionRevision(putActionRevisionRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<PutActionRevisionResult> putActionRevisionAsync(
        final PutActionRevisionRequest putActionRevisionRequest,
        final com.amazonaws.handlers.AsyncHandler<PutActionRevisionRequest, PutActionRevisionResult> asyncHandler) {
    // Run the synchronous putActionRevision call on the executor and notify
    // the supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<PutActionRevisionResult> task =
            new java.util.concurrent.Callable<PutActionRevisionResult>() {
                @Override
                public PutActionRevisionResult call() throws Exception {
                    final PutActionRevisionResult response;
                    try {
                        response = putActionRevision(putActionRevisionRequest);
                    } catch (Exception ex) {
                        // Report the failure to the handler before propagating it to the Future.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    asyncHandler.onSuccess(putActionRevisionRequest, response);
                    return response;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> putJobFailureResultAsync(
        final PutJobFailureResultRequest putJobFailureResultRequest) {
    // Execute the synchronous putJobFailureResult call asynchronously;
    // the returned Future yields null on success.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() {
            putJobFailureResult(putJobFailureResultRequest);
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> putJobFailureResultAsync(
        final PutJobFailureResultRequest putJobFailureResultRequest,
        final com.amazonaws.handlers.AsyncHandler<PutJobFailureResultRequest, Void> asyncHandler) {
    // Run the synchronous putJobFailureResult call on the executor and notify
    // the supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() throws Exception {
            try {
                putJobFailureResult(putJobFailureResultRequest);
            } catch (Exception ex) {
                // Report the failure to the handler before propagating it to the Future.
                asyncHandler.onError(ex);
                throw ex;
            }
            asyncHandler.onSuccess(putJobFailureResultRequest, null); // Void result is always null
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> putJobSuccessResultAsync(
        final PutJobSuccessResultRequest putJobSuccessResultRequest) {
    // Execute the synchronous putJobSuccessResult call asynchronously;
    // the returned Future yields null on success.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() {
            putJobSuccessResult(putJobSuccessResultRequest);
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> putJobSuccessResultAsync(
        final PutJobSuccessResultRequest putJobSuccessResultRequest,
        final com.amazonaws.handlers.AsyncHandler<PutJobSuccessResultRequest, Void> asyncHandler) {
    // Run the synchronous putJobSuccessResult call on the executor and notify
    // the supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() throws Exception {
            try {
                putJobSuccessResult(putJobSuccessResultRequest);
            } catch (Exception ex) {
                // Report the failure to the handler before propagating it to the Future.
                asyncHandler.onError(ex);
                throw ex;
            }
            asyncHandler.onSuccess(putJobSuccessResultRequest, null); // Void result is always null
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> putThirdPartyJobFailureResultAsync(
        final PutThirdPartyJobFailureResultRequest putThirdPartyJobFailureResultRequest) {
    // Execute the synchronous putThirdPartyJobFailureResult call asynchronously;
    // the returned Future yields null on success.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() {
            putThirdPartyJobFailureResult(putThirdPartyJobFailureResultRequest);
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> putThirdPartyJobFailureResultAsync(
        final PutThirdPartyJobFailureResultRequest putThirdPartyJobFailureResultRequest,
        final com.amazonaws.handlers.AsyncHandler<PutThirdPartyJobFailureResultRequest, Void> asyncHandler) {
    // Run the synchronous putThirdPartyJobFailureResult call on the executor
    // and notify the supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() throws Exception {
            try {
                putThirdPartyJobFailureResult(putThirdPartyJobFailureResultRequest);
            } catch (Exception ex) {
                // Report the failure to the handler before propagating it to the Future.
                asyncHandler.onError(ex);
                throw ex;
            }
            asyncHandler.onSuccess(putThirdPartyJobFailureResultRequest, null); // Void result is always null
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> putThirdPartyJobSuccessResultAsync(
        final PutThirdPartyJobSuccessResultRequest putThirdPartyJobSuccessResultRequest) {
    // Execute the synchronous putThirdPartyJobSuccessResult call asynchronously;
    // the returned Future yields null on success.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() {
            putThirdPartyJobSuccessResult(putThirdPartyJobSuccessResultRequest);
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<Void> putThirdPartyJobSuccessResultAsync(
        final PutThirdPartyJobSuccessResultRequest putThirdPartyJobSuccessResultRequest,
        final com.amazonaws.handlers.AsyncHandler<PutThirdPartyJobSuccessResultRequest, Void> asyncHandler) {
    // Run the synchronous putThirdPartyJobSuccessResult call on the executor
    // and notify the supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<Void> task = new java.util.concurrent.Callable<Void>() {
        @Override
        public Void call() throws Exception {
            try {
                putThirdPartyJobSuccessResult(putThirdPartyJobSuccessResultRequest);
            } catch (Exception ex) {
                // Report the failure to the handler before propagating it to the Future.
                asyncHandler.onError(ex);
                throw ex;
            }
            asyncHandler.onSuccess(putThirdPartyJobSuccessResultRequest, null); // Void result is always null
            return null;
        }
    };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<StartPipelineExecutionResult> startPipelineExecutionAsync(
        final StartPipelineExecutionRequest startPipelineExecutionRequest) {
    // Execute the synchronous startPipelineExecution call asynchronously on the executor.
    java.util.concurrent.Callable<StartPipelineExecutionResult> task =
            new java.util.concurrent.Callable<StartPipelineExecutionResult>() {
                @Override
                public StartPipelineExecutionResult call() {
                    return startPipelineExecution(startPipelineExecutionRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<StartPipelineExecutionResult> startPipelineExecutionAsync(
        final StartPipelineExecutionRequest startPipelineExecutionRequest,
        final com.amazonaws.handlers.AsyncHandler<StartPipelineExecutionRequest, StartPipelineExecutionResult> asyncHandler) {
    // Run the synchronous startPipelineExecution call on the executor and
    // notify the supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<StartPipelineExecutionResult> task =
            new java.util.concurrent.Callable<StartPipelineExecutionResult>() {
                @Override
                public StartPipelineExecutionResult call() throws Exception {
                    final StartPipelineExecutionResult response;
                    try {
                        response = startPipelineExecution(startPipelineExecutionRequest);
                    } catch (Exception ex) {
                        // Report the failure to the handler before propagating it to the Future.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    asyncHandler.onSuccess(startPipelineExecutionRequest, response);
                    return response;
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<UpdatePipelineResult> updatePipelineAsync(
        final UpdatePipelineRequest updatePipelineRequest) {
    // Execute the synchronous updatePipeline call asynchronously on the executor.
    java.util.concurrent.Callable<UpdatePipelineResult> task =
            new java.util.concurrent.Callable<UpdatePipelineResult>() {
                @Override
                public UpdatePipelineResult call() {
                    return updatePipeline(updatePipelineRequest);
                }
            };
    return executorService.submit(task);
}
@Override
public java.util.concurrent.Future<UpdatePipelineResult> updatePipelineAsync(
        final UpdatePipelineRequest updatePipelineRequest,
        final com.amazonaws.handlers.AsyncHandler<UpdatePipelineRequest, UpdatePipelineResult> asyncHandler) {
    // Run the synchronous updatePipeline call on the executor and notify the
    // supplied handler of the outcome before completing the Future.
    java.util.concurrent.Callable<UpdatePipelineResult> task =
            new java.util.concurrent.Callable<UpdatePipelineResult>() {
                @Override
                public UpdatePipelineResult call() throws Exception {
                    final UpdatePipelineResult response;
                    try {
                        response = updatePipeline(updatePipelineRequest);
                    } catch (Exception ex) {
                        // Report the failure to the handler before propagating it to the Future.
                        asyncHandler.onError(ex);
                        throw ex;
                    }
                    asyncHandler.onSuccess(updatePipelineRequest, response);
                    return response;
                }
            };
    return executorService.submit(task);
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.navigationToolbar;
import com.intellij.ide.ui.UISettings;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.util.CommonProcessors;
import com.intellij.util.ObjectUtils;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author Konstantin Bulenkov
* @author Anna Kozlova
*/
public class NavBarModel {
// The breadcrumb elements, ordered from root (index 0) to the deepest element (last index).
private List<Object> myModel = Collections.emptyList();
// Index of the currently selected breadcrumb element within myModel.
private int mySelectedIndex;
private final Project myProject;
// Message-bus publisher used to broadcast model/selection changes to listeners.
private final NavBarModelListener myNotificator;
// Strategy object that builds the element chain for a given PSI element.
private final NavBarModelBuilder myBuilder;
// True when the model is dirty and should be rebuilt even if the target element is unchanged.
private boolean myChanged = true;
// Set after the first successful update; suppresses repeated rebuilds for non-fixed components.
private boolean updated = false;
// True when the navigation bar is a permanently visible (fixed) component rather than a popup.
private boolean isFixedComponent = false;
public NavBarModel(final Project project) {
myProject = project;
myNotificator = project.getMessageBus().syncPublisher(NavBarModelListener.NAV_BAR);
myBuilder = NavBarModelBuilder.getInstance();
}
public int getSelectedIndex() {
return mySelectedIndex;
}
/** Returns the currently selected element, or null if the selection index is out of range. */
@Nullable
public Object getSelectedValue() {
return getElement(mySelectedIndex);
}
/** Returns the element at {@code index}, or null when the index is -1 or past the end. */
@Nullable
public Object getElement(int index) {
if (index != -1 && index < myModel.size()) {
return myModel.get(index);
}
return null;
}
public int size() {
return myModel.size();
}
public boolean isEmpty() {
return myModel.isEmpty();
}
/**
 * Maps an arbitrary (possibly negative or overflowing) index onto a valid model index:
 * negative values count from the end, values past the end wrap around modulo the size.
 */
public int getIndexByModel(int index) {
if (index < 0) return myModel.size() + index;
if (index >= myModel.size() && myModel.size() > 0) return index % myModel.size();
return index;
}
/**
 * Rebuilds the model from the focused context: prefers the context PSI file, falls back to the
 * context PSI element, and finally to a project-level root. No-ops in modal context, when the
 * context component is the nav bar itself, or when an update already happened for a non-fixed
 * component.
 */
protected void updateModel(DataContext dataContext) {
if (LaterInvocator.isInModalContext() || (updated && !isFixedComponent)) return;
if (PlatformDataKeys.CONTEXT_COMPONENT.getData(dataContext) instanceof NavBarPanel) return;
PsiElement psiElement = CommonDataKeys.PSI_FILE.getData(dataContext);
if (psiElement == null) {
psiElement = CommonDataKeys.PSI_ELEMENT.getData(dataContext);
}
psiElement = normalize(psiElement);
// Skip the rebuild when the deepest element is already the target and nothing was invalidated.
if (!myModel.isEmpty() && myModel.get(myModel.size() - 1).equals(psiElement) && !myChanged) return;
if (psiElement != null && psiElement.isValid()) {
updateModel(psiElement);
}
else {
// No PSI target: keep the existing model for a visible nav bar, otherwise show a root.
if (UISettings.getInstance().getShowNavigationBar() && !myModel.isEmpty()) return;
Object root = calculateRoot(dataContext);
if (root != null) {
setModel(Collections.singletonList(root));
}
}
setChanged(false);
updated = true;
}
/**
 * Picks a fallback root for the bar: the context module if present, otherwise the project's
 * sole child (or grandchild) when the hierarchy is a single chain, otherwise the project itself.
 */
private Object calculateRoot(DataContext dataContext) {
// Narrow down the root element to the first interesting one
Object root = LangDataKeys.MODULE.getData(dataContext);
if (root != null) return root;
Project project = CommonDataKeys.PROJECT.getData(dataContext);
if (project == null) return null;
Object projectChild;
Object projectGrandChild = null;
// FindFirstAndOnlyProcessor yields a value only when exactly one child exists.
CommonProcessors.FindFirstAndOnlyProcessor<Object> processor = new CommonProcessors.FindFirstAndOnlyProcessor<>();
processChildren(project, processor);
projectChild = processor.reset();
if (projectChild != null) {
processChildren(projectChild, processor);
projectGrandChild = processor.reset();
}
return ObjectUtils.chooseNotNull(projectGrandChild, ObjectUtils.chooseNotNull(projectChild, project));
}
/**
 * Rebuilds the model for {@code psiElement}: collects the topmost content roots (plus any
 * extension-provided roots), asks the builder for the element chain under a read action,
 * and installs it reversed so the root ends up first.
 */
protected void updateModel(final PsiElement psiElement) {
final Set<VirtualFile> roots = new HashSet<>();
final ProjectRootManager projectRootManager = ProjectRootManager.getInstance(myProject);
final ProjectFileIndex projectFileIndex = projectRootManager.getFileIndex();
for (VirtualFile root : projectRootManager.getContentRoots()) {
VirtualFile parent = root.getParent();
// Only keep top-level roots: skip roots nested inside another content root.
if (parent == null || !projectFileIndex.isInContent(parent)) {
roots.add(root);
}
}
for (final NavBarModelExtension modelExtension : NavBarModelExtension.EP_NAME.getExtensionList()) {
for (VirtualFile root : modelExtension.additionalRoots(psiElement.getProject())) {
VirtualFile parent = root.getParent();
if (parent == null || !projectFileIndex.isInContent(parent)) {
roots.add(root);
}
}
}
List<Object> updatedModel = ReadAction.compute(() -> isValid(psiElement) ? myBuilder.createModel(psiElement, roots) : Collections.emptyList());
setModel(ContainerUtil.reverse(updatedModel));
}
/**
 * Drops everything from the first invalid element onward; the model is replaced only when
 * an invalid element was actually found.
 */
void revalidate() {
final List<Object> objects = new ArrayList<>();
boolean update = false;
for (Object o : myModel) {
if (isValid(o)) {
objects.add(o);
} else {
update = true;
break;
}
}
if (update) {
setModel(objects);
}
}
/** Installs a new model, selects its last element, and notifies listeners if it differs. */
protected void setModel(List<Object> model) {
if (!model.equals(myModel)) {
myModel = model;
myNotificator.modelChanged();
mySelectedIndex = myModel.size() - 1;
myNotificator.selectionChanged();
}
}
/** Rebuilds the model around a PSI element, or shows {@code project > module} for a Module. */
public void updateModel(final Object object) {
if (object instanceof PsiElement) {
updateModel((PsiElement)object);
}
else if (object instanceof Module) {
List<Object> l = new ArrayList<>();
l.add(myProject);
l.add(object);
setModel(l);
}
}
// processChildren returns false as soon as the first child is found, hence the negation.
protected boolean hasChildren(Object object) {
return !processChildren(object, new CommonProcessors.FindFirstProcessor<>());
}
//to avoid the following situation: element was taken from NavBarPanel via data context and all left children
// were truncated by traverseToRoot
public void setChanged(boolean changed) {
myChanged = changed;
}
/** Returns true when {@code object} is a live (non-disposed / valid) model element. */
static boolean isValid(final Object object) {
if (object instanceof Project) {
return !((Project)object).isDisposed();
}
if (object instanceof Module) {
return !((Module)object).isDisposed();
}
if (object instanceof PsiElement) {
return ReadAction.compute(() -> ((PsiElement)object).isValid()).booleanValue();
}
return object != null;
}
/**
 * Lets every NavBarModelExtension adjust the element (in reverse registration order);
 * returns null as soon as any extension rejects it.
 */
@Nullable
public static PsiElement normalize(@Nullable PsiElement child) {
if (child == null) return null;
List<NavBarModelExtension> extensions = NavBarModelExtension.EP_NAME.getExtensionList();
for (int i = extensions.size() - 1; i >= 0; i--) {
NavBarModelExtension modelExtension = extensions.get(i);
child = modelExtension.adjustElement(child);
if (child == null) return null;
}
return child;
}
/** Collects, normalizes and sorts the children of {@code object} for display in the popup. */
protected List<Object> getChildren(final Object object) {
final List<Object> result = ContainerUtil.newArrayList();
Processor<Object> processor = o -> {
ContainerUtil.addIfNotNull(result, o instanceof PsiElement ? normalize((PsiElement)o) : o);
return true;
};
processChildren(object, processor);
Collections.sort(result, new SiblingsComparator());
return result;
}
/**
 * Feeds the children of {@code object} to {@code processor} via the registered extensions.
 * Returns false when a processor aborted the iteration; true otherwise (including when
 * {@code object} or the model's root element is no longer valid).
 */
private boolean processChildren(Object object, @NotNull Processor<Object> processor) {
if (!isValid(object)) return true;
final Object rootElement = size() > 1 ? getElement(1) : null;
if (rootElement != null && !isValid(rootElement)) return true;
for (NavBarModelExtension modelExtension : NavBarModelExtension.EP_NAME.getExtensionList()) {
if (modelExtension instanceof AbstractNavBarModelExtension) {
if (!((AbstractNavBarModelExtension)modelExtension).processChildren(object, rootElement, processor)) return false;
}
}
return true;
}
public Object get(final int index) {
return myModel.get(index);
}
public int indexOf(Object value) {
return myModel.indexOf(value);
}
/** Updates the selection and notifies listeners only when the index actually changes. */
public void setSelectedIndex(final int selectedIndex) {
if (mySelectedIndex != selectedIndex) {
mySelectedIndex = selectedIndex;
myNotificator.selectionChanged();
}
}
public void setFixedComponent(boolean fixedComponent) {
isFixedComponent = fixedComponent;
}
/**
 * Orders siblings by descending weight (modules before directories before named elements
 * before files), then case-insensitively by name; unweighted objects sort first.
 */
private static final class SiblingsComparator implements Comparator<Object> {
@Override
public int compare(final Object o1, final Object o2) {
final Pair<Integer, String> w1 = getWeightedName(o1);
final Pair<Integer, String> w2 = getWeightedName(o2);
if (w1 == null) return w2 == null ? 0 : -1;
if (w2 == null) return 1;
if (!w1.first.equals(w2.first)) {
// Higher weight sorts first; weights are small constants so this cannot overflow.
return -w1.first.intValue() + w2.first.intValue();
}
return Comparing.compare(w1.second, w2.second, String.CASE_INSENSITIVE_ORDER);
}
/** Returns (weight, display name) for known element kinds, or null for everything else. */
@Nullable
private static Pair<Integer, String> getWeightedName(Object object) {
if (object instanceof Module) {
return Pair.create(5, ((Module)object).getName());
}
if (object instanceof PsiDirectoryContainer) {
return Pair.create(4, ((PsiDirectoryContainer)object).getName());
}
else if (object instanceof PsiDirectory) {
return Pair.create(4, ((PsiDirectory)object).getName());
}
if (object instanceof PsiFile) {
return Pair.create(2, ((PsiFile)object).getName());
}
if (object instanceof PsiNamedElement) {
return Pair.create(3, ((PsiNamedElement)object).getName());
}
return null;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.checkpointing.utils;
import org.apache.flink.api.common.accumulators.IntCounter;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.watermark.Watermark;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertThat;
/** A utility class containing common functions/classes used by multiple migration tests. */
public class MigrationTestUtils {
/** A non-parallel source with list state used for testing. */
public static class CheckpointingNonParallelSourceWithListState
        implements SourceFunction<Tuple2<Long, Long>>, CheckpointedFunction {

    static final ListStateDescriptor<String> STATE_DESCRIPTOR =
            new ListStateDescriptor<>("source-state", StringSerializer.INSTANCE);

    static final String CHECKPOINTED_STRING = "Here be dragons!";
    static final String CHECKPOINTED_STRING_1 = "Here be more dragons!";
    static final String CHECKPOINTED_STRING_2 = "Here be yet more dragons!";
    static final String CHECKPOINTED_STRING_3 = "Here be the mostest dragons!";

    private static final long serialVersionUID = 1L;

    // Flipped by cancel(); volatile so the run() loop observes the change promptly.
    private volatile boolean isRunning = true;

    private final int numElements;

    private transient ListState<String> unionListState;

    public CheckpointingNonParallelSourceWithListState(int numElements) {
        this.numElements = numElements;
    }

    @Override
    public void snapshotState(FunctionSnapshotContext context) throws Exception {
        // Replace the state content with the four well-known strings on every snapshot.
        unionListState.clear();
        for (String checkpointed :
                new String[] {
                    CHECKPOINTED_STRING,
                    CHECKPOINTED_STRING_1,
                    CHECKPOINTED_STRING_2,
                    CHECKPOINTED_STRING_3
                }) {
            unionListState.add(checkpointed);
        }
    }

    @Override
    public void initializeState(FunctionInitializationContext context) throws Exception {
        unionListState = context.getOperatorStateStore().getListState(STATE_DESCRIPTOR);
    }

    @Override
    public void run(SourceContext<Tuple2<Long, Long>> ctx) throws Exception {
        ctx.emitWatermark(new Watermark(0));

        // Emit all elements while holding the checkpoint lock.
        synchronized (ctx.getCheckpointLock()) {
            long i = 0;
            while (i < numElements) {
                ctx.collect(new Tuple2<>(i, i));
                i++;
            }
        }

        // don't emit a final watermark so that we don't trigger the registered event-time
        // timers
        while (isRunning) {
            Thread.sleep(20);
        }
    }

    @Override
    public void cancel() {
        isRunning = false;
    }
}
/**
* A non-parallel source with union state used to verify the restored state of {@link
* CheckpointingNonParallelSourceWithListState}.
*/
public static class CheckingNonParallelSourceWithListState
        extends RichSourceFunction<Tuple2<Long, Long>> implements CheckpointedFunction {

    private static final long serialVersionUID = 1L;

    static final String SUCCESSFUL_RESTORE_CHECK_ACCUMULATOR =
            CheckingNonParallelSourceWithListState.class + "_RESTORE_CHECK";

    // Flipped by cancel(); volatile so the run() loop observes the change promptly.
    private volatile boolean isRunning = true;

    private final int numElements;

    public CheckingNonParallelSourceWithListState(int numElements) {
        this.numElements = numElements;
    }

    @Override
    public void snapshotState(FunctionSnapshotContext context) throws Exception {
        // Nothing to snapshot: this source only verifies previously restored state.
    }

    @Override
    public void initializeState(FunctionInitializationContext context) throws Exception {
        ListState<String> restoredState =
                context.getOperatorStateStore()
                        .getListState(
                                CheckpointingNonParallelSourceWithListState.STATE_DESCRIPTOR);

        if (!context.isRestored()) {
            throw new RuntimeException(
                    "This source should always be restored because it's only used when restoring from a savepoint.");
        }

        // Verify the state written by CheckpointingNonParallelSourceWithListState.
        assertThat(
                restoredState.get(),
                containsInAnyOrder(
                        CheckpointingNonParallelSourceWithListState.CHECKPOINTED_STRING,
                        CheckpointingNonParallelSourceWithListState.CHECKPOINTED_STRING_1,
                        CheckpointingNonParallelSourceWithListState.CHECKPOINTED_STRING_2,
                        CheckpointingNonParallelSourceWithListState.CHECKPOINTED_STRING_3));

        // Record the successful check so the surrounding test can assert on the accumulator.
        getRuntimeContext()
                .addAccumulator(SUCCESSFUL_RESTORE_CHECK_ACCUMULATOR, new IntCounter());
        getRuntimeContext().getAccumulator(SUCCESSFUL_RESTORE_CHECK_ACCUMULATOR).add(1);
    }

    @Override
    public void run(SourceContext<Tuple2<Long, Long>> ctx) throws Exception {
        // immediately trigger any set timers
        ctx.emitWatermark(new Watermark(1000));

        synchronized (ctx.getCheckpointLock()) {
            long i = 0;
            while (i < numElements) {
                ctx.collect(new Tuple2<>(i, i));
                i++;
            }
        }

        while (isRunning) {
            Thread.sleep(20);
        }
    }

    @Override
    public void cancel() {
        isRunning = false;
    }
}
/** A parallel source with union state used for testing. */
public static class CheckpointingParallelSourceWithUnionListState
        extends RichSourceFunction<Tuple2<Long, Long>> implements CheckpointedFunction {
    // NOTE(review): despite the name, this class extends RichSourceFunction rather
    // than RichParallelSourceFunction (which its checking counterpart extends) --
    // verify whether the job actually runs this source with parallelism > 1.
    // Descriptor for the state shared with the checking counterpart.
    static final ListStateDescriptor<String> STATE_DESCRIPTOR =
            new ListStateDescriptor<>("source-state", StringSerializer.INSTANCE);
    // Marker strings distributed across subtasks at checkpoint time.
    static final String[] CHECKPOINTED_STRINGS = {
        "Here be dragons!",
        "Here be more dragons!",
        "Here be yet more dragons!",
        "Here be the mostest dragons!"
    };
    private static final long serialVersionUID = 1L;
    // Keeps run() alive until cancel(); volatile for cross-thread visibility.
    private volatile boolean isRunning = true;
    // Number of candidate indices; each subtask emits only its share (see run()).
    private final int numElements;
    // Union list state; transient because it is re-acquired in initializeState().
    private transient ListState<String> unionListState;

    /**
     * Creates the source.
     *
     * @param numElements number of candidate {@code Tuple2<Long, Long>} records
     *     partitioned across subtasks
     */
    public CheckpointingParallelSourceWithUnionListState(int numElements) {
        this.numElements = numElements;
    }

    @Override
    public void snapshotState(FunctionSnapshotContext context) throws Exception {
        unionListState.clear();
        for (String s : CHECKPOINTED_STRINGS) {
            // Each subtask stores the strings that hash to its index. Math.floorMod
            // is used instead of '%' because String.hashCode() may be negative: a
            // negative remainder matches no subtask index (always >= 0), so such a
            // string would silently be dropped from the checkpoint and the
            // containsInAnyOrder check in the restoring job would fail.
            if (Math.floorMod(s.hashCode(), getRuntimeContext().getNumberOfParallelSubtasks())
                    == getRuntimeContext().getIndexOfThisSubtask()) {
                unionListState.add(s);
            }
        }
    }

    @Override
    public void initializeState(FunctionInitializationContext context) throws Exception {
        // Union state: on restore every subtask receives the full concatenated list.
        unionListState = context.getOperatorStateStore().getUnionListState(STATE_DESCRIPTOR);
    }

    @Override
    public void run(SourceContext<Tuple2<Long, Long>> ctx) throws Exception {
        ctx.emitWatermark(new Watermark(0));
        synchronized (ctx.getCheckpointLock()) {
            for (long i = 0; i < numElements; i++) {
                // i is non-negative, so plain '%' partitions the range correctly here.
                if (i % getRuntimeContext().getNumberOfParallelSubtasks()
                        == getRuntimeContext().getIndexOfThisSubtask()) {
                    ctx.collect(new Tuple2<>(i, i));
                }
            }
        }
        // don't emit a final watermark so that we don't trigger the registered event-time
        // timers
        while (isRunning) {
            Thread.sleep(20);
        }
    }

    @Override
    public void cancel() {
        isRunning = false;
    }
}
/**
 * A parallel source with union state used to verify the restored state of {@link
 * CheckpointingParallelSourceWithUnionListState}.
 */
public static class CheckingParallelSourceWithUnionListState
        extends RichParallelSourceFunction<Tuple2<Long, Long>> implements CheckpointedFunction {
    private static final long serialVersionUID = 1L;
    // Accumulator name under which each subtask reports a successful restore check.
    static final String SUCCESSFUL_RESTORE_CHECK_ACCUMULATOR =
            CheckingParallelSourceWithUnionListState.class + "_RESTORE_CHECK";
    // Keeps run() alive until cancel(); volatile for cross-thread visibility.
    private volatile boolean isRunning = true;
    private final int numElements;

    /**
     * Creates the checking source.
     *
     * @param numElements number of candidate {@code Tuple2<Long, Long>} records
     *     partitioned across subtasks
     */
    public CheckingParallelSourceWithUnionListState(int numElements) {
        this.numElements = numElements;
    }

    @Override
    public void snapshotState(FunctionSnapshotContext context) throws Exception {}

    @Override
    public void initializeState(FunctionInitializationContext context) throws Exception {
        // Fixed: read the state with the descriptor of the *parallel* source this
        // class verifies. Previously this used
        // CheckpointingNonParallelSourceWithListState.STATE_DESCRIPTOR, which only
        // worked because both descriptors happen to share the same state name.
        ListState<String> unionListState =
                context.getOperatorStateStore()
                        .getUnionListState(
                                CheckpointingParallelSourceWithUnionListState.STATE_DESCRIPTOR);
        if (context.isRestored()) {
            // Union state redistributes the full list to every subtask, so each
            // subtask must observe all checkpointed strings.
            assertThat(
                    unionListState.get(),
                    containsInAnyOrder(
                            CheckpointingParallelSourceWithUnionListState
                                    .CHECKPOINTED_STRINGS));
            getRuntimeContext()
                    .addAccumulator(SUCCESSFUL_RESTORE_CHECK_ACCUMULATOR, new IntCounter());
            getRuntimeContext().getAccumulator(SUCCESSFUL_RESTORE_CHECK_ACCUMULATOR).add(1);
        } else {
            throw new RuntimeException(
                    "This source should always be restored because it's only used when restoring from a savepoint.");
        }
    }

    @Override
    public void run(SourceContext<Tuple2<Long, Long>> ctx) throws Exception {
        // immediately trigger any set timers
        ctx.emitWatermark(new Watermark(1000));
        synchronized (ctx.getCheckpointLock()) {
            for (long i = 0; i < numElements; i++) {
                // Each subtask emits only the indices assigned to it.
                if (i % getRuntimeContext().getNumberOfParallelSubtasks()
                        == getRuntimeContext().getIndexOfThisSubtask()) {
                    ctx.collect(new Tuple2<>(i, i));
                }
            }
        }
        // Stay alive until cancelled.
        while (isRunning) {
            Thread.sleep(20);
        }
    }

    @Override
    public void cancel() {
        isRunning = false;
    }
}
/** A sink which counts the elements it sees in an accumulator. */
public static class AccumulatorCountingSink<T> extends RichSinkFunction<T> {
    private static final long serialVersionUID = 1L;
    // Accumulator name under which the element count is published.
    public static final String NUM_ELEMENTS_ACCUMULATOR =
            AccumulatorCountingSink.class + "_NUM_ELEMENTS";
    // Local element count, kept alongside the accumulator.
    int count = 0;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        // Register the accumulator up front so it exists even if no element arrives.
        getRuntimeContext().addAccumulator(NUM_ELEMENTS_ACCUMULATOR, new IntCounter());
    }

    @Override
    public void invoke(T value, Context context) throws Exception {
        // Track each element both locally and in the shared accumulator.
        getRuntimeContext().getAccumulator(NUM_ELEMENTS_ACCUMULATOR).add(1);
        count += 1;
    }
}
}
| |
package nl.victronenergy.util;
import nl.victronenergy.R;
import nl.victronenergy.models.Attribute;
import nl.victronenergy.util.Constants.AttributeUnit;
import android.content.Context;
import android.view.View;
import android.widget.TextView;
/**
 * Contains a couple of functions to help formatting the data attributes
 *
 * @author M2Mobi
 */
public final class AttributeUtils {
    private static final String LOG_TAG = "AttributeUtils";

    /** Utility class; all members are static, so instantiation is disallowed. */
    private AttributeUtils() {
    }

    /**
     * Set the data attribute value in a textview
     *
     * @param pContext
     *        The context needed to load the String resources
     * @param pTextView
     *        The textview to show the value in
     * @param pAttribute
     *        The attribute we want to show the value of
     * @param pAttributeUnit
     *        The unit we would like to show the value in
     * @param hideIfAttributeNull
     *        Hide the textview if the attribute is null
     */
    public static void setDataInTextView(Context pContext,
            TextView pTextView,
            Attribute pAttribute,
            AttributeUnit pAttributeUnit,
            boolean hideIfAttributeNull) {
        if (pTextView == null) {
            // Nothing to update.
            return;
        }
        if (pAttribute == null) {
            if (hideIfAttributeNull) {
                // NOTE(review): visibility is never reset to VISIBLE when a value
                // arrives later for a recycled view -- confirm callers handle this.
                pTextView.setVisibility(View.GONE);
            } else {
                pTextView.setText(getNotAvailableString(pContext, pAttributeUnit));
            }
        } else {
            pTextView.setText(getFormattedValue(pContext, pAttribute.getFloatValue(), pAttributeUnit));
        }
        // NOTE(review): Resources.getColor(int) is deprecated since API 23; prefer
        // ContextCompat.getColor(context, id) once a support/AndroidX dependency is available.
        pTextView.setTextColor(pContext.getResources().getColor(getColorForAttributeUnit(pAttributeUnit)));
    }

    /**
     * Returns the color the value should be displayed in
     *
     * @param pAttributeUnit
     *        The unit of this attribute
     * @return The color resource id for the attribute; units without an explicit
     *         mapping (PERCENTAGE, TIME, WATTS, COUNT, ...) fall back to black
     */
    public static int getColorForAttributeUnit(AttributeUnit pAttributeUnit) {
        switch (pAttributeUnit) {
            case AMPHOUR:
                return R.color.orange;
            case VOLT:
                return R.color.blue;
            case AMPS:
                return R.color.red;
            default:
                return R.color.black;
        }
    }

    /**
     * Returns the not available string for the requested type
     *
     * @param pContext
     *        The context needed to load the String resource
     * @param pAttributeUnit
     *        The type we want the string to be in
     * @return The not available string for the requested type
     */
    public static String getNotAvailableString(Context pContext, AttributeUnit pAttributeUnit) {
        switch (pAttributeUnit) {
            case AMPHOUR:
                return pContext.getString(R.string.not_available_ah);
            case AMPS:
                return pContext.getString(R.string.not_available_a);
            case PERCENTAGE:
                return pContext.getString(R.string.not_available_percentage);
            case TIME:
                return pContext.getString(R.string.not_available_time);
            case VOLT:
                return pContext.getString(R.string.not_available_v);
            case WATTS:
                return pContext.getString(R.string.not_available_w);
            case COUNT:
                // COUNT uses the generic fallback; fall through to default.
            default:
                return pContext.getString(R.string.not_available);
        }
    }

    /**
     * Returns the formatted string of a float
     *
     * @param pContext
     *        The context needed to load the format String resource
     * @param pValue
     *        The value to format
     * @param pAttributeUnit
     *        The unit to format the value in
     * @return Formatted value, or the generic "not available" string for unknown units
     */
    public static String getFormattedValue(Context pContext, float pValue, AttributeUnit pAttributeUnit) {
        switch (pAttributeUnit) {
            case WATTS:
                return formatWatts(pContext, pValue);
            case VOLT:
                return formatVolt(pContext, pValue);
            case TIME:
                return formatTime(pContext, pValue);
            case AMPS:
                return formatAmps(pContext, pValue);
            case AMPHOUR:
                return formatAmpHour(pContext, pValue);
            case PERCENTAGE:
                return formatPercentage(pContext, pValue);
            case COUNT:
                return formatCount(pValue);
            default:
                return pContext.getString(R.string.not_available);
        }
    }

    /**
     * Formats a count value
     *
     * @param pValue
     *        The attribute we want formatted
     * @return Value formatted as a count (fraction truncated toward zero)
     */
    private static String formatCount(float pValue) {
        return String.valueOf((int) pValue);
    }

    /**
     * Formats an amps value
     *
     * @param pContext
     *        Context needed to retrieve the amps format string
     * @param pValue
     *        The attribute we want formatted
     * @return Value formatted as amps; the sign is discarded
     */
    private static String formatAmps(Context pContext, float pValue) {
        return pContext.getString(R.string.formatted_value_a, Math.abs(pValue));
    }

    /**
     * Formats a AmpHour value
     *
     * @param pContext
     *        Context needed to retrieve the AmpHour format string
     * @param pValue
     *        The attribute we want formatted
     * @return Value formatted as AmpHour
     */
    private static String formatAmpHour(Context pContext, float pValue) {
        return pContext.getString(R.string.formatted_value_ah, pValue);
    }

    /**
     * Formats a percentage value
     *
     * @param pContext
     *        Context needed to retrieve the percentage format string
     * @param pValue
     *        The attribute we want formatted
     * @return Value formatted as percentage; exactly 0.0 is rendered as "not available"
     *         (business rule: a zero percentage means no reading)
     */
    private static String formatPercentage(Context pContext, float pValue) {
        if (pValue == 0.0f) {
            return pContext.getString(R.string.not_available_percentage);
        }
        return pContext.getString(R.string.formatted_value_percentage, pValue);
    }

    /**
     * Formats a Watts value
     *
     * @param pContext
     *        Context needed to retrieve the Watts/kWatts format string
     * @param pValue
     *        The attribute we want formatted
     * @return Absolute value formatted as Watts, or as kWatts once it reaches 10000 W
     */
    private static String formatWatts(Context pContext, float pValue) {
        float value = Math.abs(pValue);
        if (value < 10000) {
            return pContext.getString(R.string.formatted_value_w, value);
        } else {
            // Divide by 1000 to get kW
            value /= 1000;
            return pContext.getString(R.string.formatted_value_kw, value);
        }
    }

    /**
     * Formats a volt value
     *
     * @param pContext
     *        Context needed to retrieve the volt format string
     * @param pValue
     *        The attribute we want formatted
     * @return Value formatted as volt
     */
    private static String formatVolt(Context pContext, float pValue) {
        return pContext.getString(R.string.formatted_value_v, pValue);
    }

    /**
     * The webservice returns a float (for example 23.041) where 23 would be the amount of hours and
     * the fractional part the fraction of an hour remaining. In order to display it according to
     * business rules, it needs to be formatted as hours to go and minutes to go (as xxh xxm). If no
     * time is remaining it should display --h --m.
     *
     * @param pContext
     *        The context needed to load string resources
     * @param pValue
     *        Time remaining as fractional hours
     * @return The time to go formatted as xxh xxm
     */
    private static String formatTime(Context pContext, float pValue) {
        // Whole hours.
        int hours = (int) Math.floor(pValue);
        // Fraction of an hour multiplied by 60 to get the amount of minutes.
        int minutes = Math.round((pValue - hours) * 60);
        // Don't show the time if there is no time left.
        if (hours == 0 && minutes == 0) {
            return pContext.getString(R.string.not_available_time);
        }
        return pContext.getString(R.string.formatted_value_time, hours, minutes);
    }
}
| |
// Copyright 2008 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.common.labs.matcher;
import com.google.common.base.Preconditions;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* This class parses a Google URL pattern into an immutable representation that
* provides equivalent Java regexes,
* exact-match patterns and prefix patterns, as appropriate. For a description
* of Google URL patterns, see
* documentation in <a
* href="http://code.google.com/apis/searchappliance/documentation/50/admin/URL_patterns.html">
* this document</a>.
* <p>
* All Google URL patterns can be translated into an equivalent Java regex (with
* some exceptions and caveats, see below). This class provides access to an
* equivalent Java regex through {@link #getUrlRegex()}.
* <p>
* In addition, the class provides further analysis and special kinds of
* patterns, depending on these top-level predicates:
* <ul>
* <li> {@link #isHostPathType()}: Returns {@code true} if the parsed pattern
* is a "host-path" pattern. A "host-path" pattern is a pattern that can be
* parsed into two regexes, a host regex and a path regex, such that a subject
* URL matches the original URL pattern iff the host portion matches the host
* regex and the path portion matches the path regex. If
* {@code isHostPathType()} is true, then {@link #getHostRegex()} and
* {@link #getPathRegex()} return the corresponding regexes. </li>
* <li> {@link #isPathPrefixMatch()}: Returns {@code true} if the parsed
* pattern is a "host-path" pattern and the path portion of the pattern is
* simply a fixed string that must appear at the beginning of the path. In this
* case, {@link #getPathPrefixString()} returns a simple string (not a regex)
* that can be matched against the start of the subject URL's path. </li>
* <li> {@link #isPathExactMatch()}: Returns {@code true} if the parsed
* pattern is a "host-path" pattern and the path portion of the pattern is an
* exact-match string. In this case, {@link #getPathExactString()} returns a
* simple string (not a regex) that can be matched exactly against the subject
* URL's path. </li>
* </ul>
* <p>In summary:
* <ul>
* <li> {@code getUrlRegex()} provides an equivalent Java regex for the entire
* pattern. </li>
* <li> If {@code isHostPathType()} is true, then, {@code getHostRegex()} and
* {@code getPathRegex()} return regexes for the two portions.</li>
 * <li> If {@code isPathPrefixMatch()} is true, then,
 * {@code getPathPrefixString()} returns a simple string pattern for
 * prefix match.</li>
 * <li> If {@code isPathExactMatch()} is true, then, in addition,
 * {@code getPathExactString()} returns a simple string pattern for exact
 * match.
* </ul>
* <p>
* Note: the "path" portion is the hierarchical part, that is, everything
* following the first slash (not the {@code ://}). The "host" portion is
* everything before that. For example: for the URL
* {@code http://www.example.com/foo/bar}, the protocol-authority portion is
* {@code http://www.example.com/} and the file portion is {@code /foo/bar}.
* Note that the middle slash appears in both portions.
* <p>
* A parser is provided to separate a URL string into host and path portions:
* {@link AnalyzedUrl}. You can access the host and path portions through
* {@link AnalyzedUrl#getHostPart()} and {@link AnalyzedUrl#getPathPart()}. It
* is recommended that this parser be used rather than the standard
* {@code getHost()} and {@code getPath()} functions of {@link java.net.URL},
* because this class and {@code AnalyzedUrl} share parsing infrastructure and
* at present, there is at least one significant difference:
* {@code AnalyzedUrl.getPathPart()} includes the leading slash but
* {@code java.net.URL.getPath()} does not. TODO: fix this.
* <p>
* Exceptions and caveats: not all forms of Google URL patterns are currently
* supported. At present, these exceptions and special cases apply:
* <ul>
* <li> {@code www?:} patterns are not supported </li>
* <li> {@code regexp:} and {@code regexpCase:} patterns are translated simply
* by removing those two prefixes. Thus, the remaining pattern is assumed to be
* a Java regex, not a GNU regex (as documented on the <a
* href="http://code.google.com/apis/searchappliance/documentation/50/admin/URL_patterns.html">
* reference site</a>). </li>
* <li> {@code regexpIgnoreCase:} patterns are handled similarly. In this case,
* the prefix is removed and the pattern is enclosed in {@code (?i:}...{@code )}</li>
* <li> Exception patterns (patterns with leading {@code -} or {@code +-}) are
* not supported.</li>
* </ul>
*/
public class ParsedUrlPattern {
private final String urlPattern;
private final String urlRegex;
private final boolean hostPathType;
private final String hostRegex;
private final String pathRegex;
private final boolean pathExactMatch;
private final String pathExactMatchPattern;
private final boolean prefixPathMatch;
private final String prefixPathMatchPattern;
/**
* Parses a Google URL pattern to Java regexes. Google URL patterns are
* publicly documented <a
* href="http://code.google.com/apis/searchappliance/documentation/50/admin/URL_patterns.html">
* here </a>.
*
* @param urlPattern A Google URL pattern
* @throws IllegalArgumentException if the URL pattern is unsupported or can
* not be parsed
*/
public ParsedUrlPattern(String urlPattern) {
ParsedUrlPatternBuilder t = new ParsedUrlPatternBuilder(urlPattern);
this.urlPattern = t.urlPattern;
this.urlRegex = t.urlRegex;
this.hostPathType = t.hostPathType;
this.hostRegex = t.hostRegex;
this.pathRegex = t.pathRegex;
this.pathExactMatch = t.pathExactMatch;
this.pathExactMatchPattern = t.pathExactMatchPattern;
this.prefixPathMatch = t.prefixPathMatch;
this.prefixPathMatchPattern = t.prefixPathMatchPattern;
}
/**
* Returns a regex that matches the entire URL. A subject string matches the
* URL pattern iff it matches this regex.
*
* @return a regex that matches the entire URL
*/
public String getUrlRegex() {
return urlRegex;
}
/**
* Returns {@code true} if the parsed pattern is a "host-path" pattern. A
* "host-path" pattern is a pattern that can be parsed into two regexes, a
* host regex and a path regex, such that a subject url matches the pattern
* iff the host portion matches the host regex and the path portion matches
* the path regex.
* <p>
* For example, the pattern {@code example.com/foo} might be parsed into two
* regexes, host regex: {@code example.com/$} and path regex: {@code ^/foo}.
*/
public boolean isHostPathType() {
return hostPathType;
}
/**
* Returns a regex that matches the host (protocol and authority) portion of
* the URL. If this is a host-path regex then a subject string matches the url
* pattern iff the host portion matches this regex and the path portion
* matches the corresponding path regex (obtained by {@link #getPathRegex()}).
* <p>
* This should be used against URLs that have been parsed using the
* {@link AnalyzedUrl} class.
* <p>
* Note: this should only be used if {@code isHostPathType()} is true; if not,
* then this method throws an {@code IllegalStateException}.
*
* @return a regex that matches the host (protocol and authority) portion of
* the URL
* @throws IllegalStateException if {@code isHostPathType()} is false
*/
public String getHostRegex() {
Preconditions.checkState(isHostPathType());
return hostRegex;
}
/**
* Returns a regex that matches the path (hierarchical) portion of the URL.
* <p>
* This should be used against URLs that have been parsed using the
* {@link AnalyzedUrl} class.
* <p>
* Note: this should only be used if {@link #isHostPathType()} is true; if
* not, then this method throws an {@code IllegalStateException}.
*
* @return a regex that matches the path (hierarchical) portion of the URL
* @throws IllegalStateException if {@code isHostPathType()} is false
*/
public String getPathRegex() {
Preconditions.checkState(isHostPathType());
return pathRegex;
}
/**
* Indicates whether the parsed pattern gives a prefix match pattern. If this
* is true, then this pattern can be obtained using
* {@link #getPathPrefixString()}.
*
* @return {@code true} if the parsed pattern gives an prefix match pattern.
*/
public boolean isPathPrefixMatch() {
return prefixPathMatch;
}
/**
* If {@link #isPathPrefixMatch()} is true, then this returns a simple string
* that can be matched against the path portion of a subject string using
* {@link String#startsWith(String)}.
* <p>
* Note: this should only be used if {@code isPrefixPathMatch()} is true; if
* not, then this method throws an {@code IllegalStateException}.
*
* @return a string that matches a prefix of the path portion of the URL
* @throws IllegalStateException if {@code isPathPrefixMatch()} is false
*/
public String getPathPrefixString() {
Preconditions.checkState(isPathPrefixMatch());
return prefixPathMatchPattern;
}
/**
* Returns whether the parsed pattern gives an exact match pattern. If this is
* true, then this pattern can be obtained using {@link #getPathExactString()}.
*
* @return {@code true} if the parsed pattern gives an exact match pattern.
*/
public boolean isPathExactMatch() {
return pathExactMatch;
}
/**
* If {@link #isPathExactMatch()} is true, then this returns a simple string
* that can be matched against the path portion of a subject string using
* {@link String#equals(Object)}. Note: this should only be used if
* {@code isPathExactMatch()} is true; if not, then this method throws an
* {@code IllegalStateException}.
*
* @return a string that matches the entire path
* @throws IllegalStateException if {@code isPathExactMatch()} is false
*/
public String getPathExactString() {
Preconditions.checkState(isPathExactMatch());
return pathExactMatchPattern;
}
/**
* Returns the original URL pattern.
*
* @return the original URL pattern.
*/
public String getUrlPattern() {
return urlPattern;
}
// This is the master meta-regex. This is used both for parsing URL patterns
// and for parsing URLs
private static final String URL_METAPATTERN_STRING =
"\\A(\\^)?((?:([^/:$<]*)((?:(?::|(?::/))?\\Z)|(?:://)))?" +
// ___1_____2a__3_________4b__c____d____________e
"(?:([^/:@]*)@)?([^/:<]*)?(?::([^/<]*))?)(/|(?:</>))?(?:(.*?)(\\Z|\\$)?)?\\Z"
// f__5___________6_________g___7__________8__h________i__9____0
;
// Groups: (capturing groups are numbered, non-capturing are lettered)
// 1 anchor (^)
// 2 protocol + authority (not including /)
// a protocol + ((nothing or : or :/ followed by end of pattern) or ::/)
// 3 protocol
// 4 protocol separator ((nothing or : or :/ followed by end of pattern) or
// ::/)
// b nothing or : or :/ followed by end of pattern
// c : or :/
// d :/
// e ::/
// f userinfo + @
// 5 userinfo
// 6 host
// g : + port
// 7 port
// 8 slash (after authority) (could be a slash or "</>")
// h </>
// i file + anchor
// 9 file
// 10 anchor ($)
// This Pattern is package visible so it can be used by AnalyzedUrl
static final Pattern URL_METAPATTERN = Pattern.compile(URL_METAPATTERN_STRING);
// As above, the enum is package visible so it can be used by AnalyzedUrl
// Note: if you change the master regex, you should change this enum to match
static enum MetaRegexGroup {
LEFT_ANCHOR(1), PROTOCOL_AUTHORITY(2), PROTOCOL(3), PROTOCOL_SEPARATOR(4), USERINFO(5),
HOST(6), PORT(7), SLASH_AFTER_AUTHORITY(8), FILE(9), RIGHT_ANCHOR(10);
private int n;
MetaRegexGroup(int n) {
this.n = n;
}
int intValue() {
return n;
}
}
// This static helper is also shared with the AnalyzedUrl
static String getGroup(Matcher m, MetaRegexGroup g) {
String s = m.group(g.intValue());
return (s == null) ? "" : s;
}
private static class ParsedUrlPatternBuilder {
public String urlPattern;
public String urlRegex;
public boolean hostPathType;
public String hostRegex;
public String pathRegex;
public boolean pathExactMatch;
public String pathExactMatchPattern;
public boolean prefixPathMatch;
public String prefixPathMatchPattern;
ParsedUrlPatternBuilder(String urlPattern) {
checkPatternValidity(urlPattern);
this.urlPattern = urlPattern;
analyze();
}
private void analyze() {
if (urlPattern.startsWith(CONTAINS_PATTERNS_METAPATTERN_PREFIX)) {
urlRegex =
Pattern.quote(urlPattern.substring(CONTAINS_PATTERNS_METAPATTERN_PREFIX.length()));
initNonHostPathPattern();
return;
}
if (urlPattern.startsWith(REGEXP_PATTERNS_METAPATTERN_PREFIX)) {
urlRegex = urlPattern.substring(REGEXP_PATTERNS_METAPATTERN_PREFIX.length());
initNonHostPathPattern();
return;
}
if (urlPattern.startsWith(REGEXPCASE_PATTERNS_METAPATTERN_PREFIX)) {
urlRegex = urlPattern.substring(REGEXPCASE_PATTERNS_METAPATTERN_PREFIX.length());
initNonHostPathPattern();
return;
}
if (urlPattern.startsWith(REGEXPIGNORECASE_PATTERNS_METAPATTERN_PREFIX)) {
urlRegex =
"(?i:" + urlPattern.substring(REGEXPIGNORECASE_PATTERNS_METAPATTERN_PREFIX.length())
+ ")";
initNonHostPathPattern();
return;
}
initHostPathPattern();
if (isNullOrEmpty(urlPattern)) {
prefixPathMatch = true;
return;
}
if (testForAndHandleNoSlashSuffixPattern()) {
return;
}
Matcher m = URL_METAPATTERN.matcher(urlPattern);
Preconditions.checkArgument(m.find(), "problem parsing urlpattern: %s", urlPattern);
urlRegex = buildUrlRegex(m);
pathRegex = buildPathRegex(m);
hostRegex = buildHostRegex(m);
}
private void initNonHostPathPattern() {
hostPathType = false;
pathRegex = null;
hostRegex = null;
pathExactMatch = false;
pathExactMatchPattern = null;
prefixPathMatch = false;
prefixPathMatchPattern = null;
}
private void initHostPathPattern() {
hostPathType = true;
urlRegex = "";
pathRegex = "";
hostRegex = "";
pathExactMatch = false;
pathExactMatchPattern = null;
prefixPathMatch = false;
prefixPathMatchPattern = "/";
}
// A suffix pattern (ends in $) that has no slash just doesn't parse well
// with
// the metapattern. So we use a special pattern for this case.
private boolean testForAndHandleNoSlashSuffixPattern() {
Matcher m = NO_SLASH_SUFFIX_PATTERN.matcher(urlPattern);
if (!m.find()) {
return false;
}
urlRegex = Pattern.quote(m.group(1)) + OUTPUT_RIGHT_ANCHOR_PATTERN_STRING;
pathRegex = urlRegex;
hostRegex = "";
pathExactMatch = false;
pathExactMatchPattern = null;
prefixPathMatch = false;
prefixPathMatchPattern = null;
return true;
}
// suffix patterns that contain no slash jam up my master meta-regex: the
// string before the $ gets put in the wrong capturing group. I fought with
// it
// a while but then bailed and just made a special meta-regex for them
private static final String NO_SLASH_SUFFIX_PATTERN_STRING = "\\A([^/]*)\\$\\Z";
private static final Pattern NO_SLASH_SUFFIX_PATTERN =
Pattern.compile(NO_SLASH_SUFFIX_PATTERN_STRING);
private static final String CONTAINS_PATTERNS_METAPATTERN_PREFIX = "contains:";
private static final String REGEXP_PATTERNS_METAPATTERN_PREFIX = "regexp:";
private static final String REGEXPCASE_PATTERNS_METAPATTERN_PREFIX = "regexpCase:";
private static final String REGEXPIGNORECASE_PATTERNS_METAPATTERN_PREFIX = "regexpIgnoreCase:";
private static final String UNSUPPORTED_PATTERNS_METAPATTERN_STRING = "\\A(?:(www\\?:)|(-))";
private static final Pattern UNSUPPORTED_PATTERNS_METAPATTERN =
Pattern.compile(UNSUPPORTED_PATTERNS_METAPATTERN_STRING);
private static final String OUTPUT_RIGHT_ANCHOR_PATTERN_STRING = "\\Z";
private static final String OUTPUT_LEFT_ANCHOR_PATTERN_STRING = "\\A";
private static final String OUTPUT_SLASH = "/";
private static final String OUTPUT_ANY_OR_NO_PORT_PATTERN = "(\\:[^/]*)?";
private static final String OUTPUT_ANY_PORT_PATTERN = "\\:[^/]*";
private static boolean isNullOrEmpty(String s) {
return (s == null || s.length() < 1);
}
// These helper functions whose names match buildSOMETHINGPattern build a
// regex to match the SOMETHING in their names. They should be usable,
// appropriately quoted regexes
private static String buildProtocolUserinfoHostPattern(Matcher m) {
StringBuilder sb = new StringBuilder();
sb.append(getGroup(m, MetaRegexGroup.PROTOCOL));
sb.append(getGroup(m, MetaRegexGroup.PROTOCOL_SEPARATOR));
String userInfo = getGroup(m, MetaRegexGroup.USERINFO);
if (!isNullOrEmpty(userInfo)) {
sb.append(userInfo);
sb.append("@");
}
sb.append(getGroup(m, MetaRegexGroup.HOST));
String unquotedPattern = sb.toString();
return isNullOrEmpty(unquotedPattern) ? "" : Pattern.quote(unquotedPattern);
}
// port is tricky because the absence of a port in a pattern should match
// any
// specific port in a target
private static String buildPortPattern(Matcher m) {
StringBuilder sb = new StringBuilder();
String port = getGroup(m, MetaRegexGroup.PORT);
if (isNullOrEmpty(port)) {
// port was empty - match any port - default or explicit
sb.append(OUTPUT_ANY_OR_NO_PORT_PATTERN);
} else {
if (port.equals("*")) {
// port was explicitly "*" - match any explicitly specified port
sb.append(OUTPUT_ANY_PORT_PATTERN);
} else {
// port was explicit and not "*" - match only that port
sb.append("\\:");
sb.append(Pattern.quote(port));
}
}
return sb.toString();
}
private static String buildUnquotedFilePattern(Matcher m) {
return getGroup(m, MetaRegexGroup.FILE);
}
private static String buildQuotedFilePattern(Matcher m) {
String unquotedPattern = buildUnquotedFilePattern(m);
return isNullOrEmpty(unquotedPattern) ? "" : Pattern.quote(unquotedPattern);
}
// the helper functions whose names match buildSOMETHINGRegex each build one
// of the three public regexes: the urlRegex, the protocolAuthorityRegex and
// the fileRegex.
// The main reason that the urlRegex is not simply the concatenation of the
// protocolAuthorityRegex and the fileRegex is the anchors. Both for
// correctness and efficiency, we want to use anchors only where
// appropriate:
// using ^A.*foo is considerably slower than just using foo.
private String buildUrlRegex(Matcher m) {
StringBuilder sb = new StringBuilder();
String leftAnchor = getGroup(m, MetaRegexGroup.LEFT_ANCHOR);
String protocolUserinfoHostPattern = buildProtocolUserinfoHostPattern(m);
String portPattern = buildPortPattern(m);
String slashAfterAuthority = getGroup(m, MetaRegexGroup.SLASH_AFTER_AUTHORITY);
String filePattern = buildQuotedFilePattern(m);
String rightAnchor = getGroup(m, MetaRegexGroup.RIGHT_ANCHOR);
// prefix patterns need to be handled specially
if (!isNullOrEmpty(leftAnchor)) {
sb.append(OUTPUT_LEFT_ANCHOR_PATTERN_STRING);
}
if (!isNullOrEmpty(protocolUserinfoHostPattern)) {
sb.append(protocolUserinfoHostPattern);
}
if (!isNullOrEmpty(portPattern)) {
if (sb.length() > 0) {
sb.append(portPattern);
}
}
if (!isNullOrEmpty(slashAfterAuthority)) {
if ("</>".equals(slashAfterAuthority)) {
if (sb.length() < 1) {
sb.append(OUTPUT_LEFT_ANCHOR_PATTERN_STRING);
sb.append("[^/]*//[^/]*");
}
}
sb.append(OUTPUT_SLASH);
}
if (!isNullOrEmpty(filePattern)) {
sb.append(filePattern);
}
if (!isNullOrEmpty(rightAnchor)) {
sb.append(rightAnchor);
}
return sb.toString();
}
private String buildHostRegex(Matcher m) {
StringBuilder sb = new StringBuilder();
String leftAnchor = getGroup(m, MetaRegexGroup.LEFT_ANCHOR);
String protocolUserinfoHostPattern = buildProtocolUserinfoHostPattern(m);
String portPattern = buildPortPattern(m);
String slashAfterAuthority = getGroup(m, MetaRegexGroup.SLASH_AFTER_AUTHORITY);
// prefix patterns need to be handled specially
if (!isNullOrEmpty(leftAnchor)) {
sb.append(OUTPUT_LEFT_ANCHOR_PATTERN_STRING);
}
if (!isNullOrEmpty(protocolUserinfoHostPattern)) {
sb.append(protocolUserinfoHostPattern);
}
if (!isNullOrEmpty(portPattern)) {
sb.append(portPattern);
}
if (!isNullOrEmpty(slashAfterAuthority)) {
sb.append(OUTPUT_SLASH);
}
return sb.toString();
}
// We expect that, in practice, the fileRegex will be used much more often
// than the protocolAuthority regex (there will probably be a hashtable for
// the protocol-authority portion), so we really want to makes sure that the
// fileRegexes are simple prefix matches, as often as possible.
/**
 * Builds the regex for the path (file) portion of a URL and, as a side
 * effect, records whether the path can be matched as a simple exact or
 * prefix string (see {@code pathExactMatch*}/{@code prefixPathMatch*}
 * fields) instead of a full regex match.
 */
private String buildPathRegex(Matcher m) {
    boolean hasLeftAnchor = false;
    boolean hasRightAnchor = false;
    StringBuilder sb = new StringBuilder();
    String protocolAuthority = getGroup(m, MetaRegexGroup.PROTOCOL_AUTHORITY);
    String slashAfterAuthority = getGroup(m, MetaRegexGroup.SLASH_AFTER_AUTHORITY);
    String unquotedFilePattern = buildUnquotedFilePattern(m);
    String rightAnchor = getGroup(m, MetaRegexGroup.RIGHT_ANCHOR);
    // two conditions for this being an prefix pattern:
    // either there was a protocolAuthority OR there was a </>
    // slashAfterAuthority
    if (!isNullOrEmpty(protocolAuthority) || "</>".equals(slashAfterAuthority)) {
        hasLeftAnchor = true;
        sb.append(OUTPUT_LEFT_ANCHOR_PATTERN_STRING);
    }
    if (!isNullOrEmpty(slashAfterAuthority)) {
        sb.append(OUTPUT_SLASH);
    }
    // Pattern.quote: the file portion is treated as a literal string.
    sb.append(Pattern.quote(unquotedFilePattern));
    if (!isNullOrEmpty(rightAnchor)) {
        hasRightAnchor = true;
        sb.append(OUTPUT_RIGHT_ANCHOR_PATTERN_STRING);
    }
    // Anchored on both sides => exact string match; left-only => prefix
    // match. Either way the regex engine can be bypassed for this path.
    if (hasLeftAnchor) {
        if (hasRightAnchor) {
            this.pathExactMatch = true;
            this.pathExactMatchPattern = "/" + unquotedFilePattern;
            this.prefixPathMatch = false;
            this.prefixPathMatchPattern = null;
        } else {
            this.pathExactMatch = false;
            this.pathExactMatchPattern = null;
            this.prefixPathMatch = true;
            this.prefixPathMatchPattern = "/" + unquotedFilePattern;
        }
    }
    return sb.toString();
}
/**
 * Rejects URL pattern strings containing constructs this matcher does not
 * support.
 *
 * @throws NullPointerException if {@code s} is null
 * @throws IllegalArgumentException if {@code s} contains an unsupported construct
 */
private static void checkPatternValidity(String s) {
    Preconditions.checkNotNull(s);
    Matcher unsupported = UNSUPPORTED_PATTERNS_METAPATTERN.matcher(s);
    Preconditions.checkArgument(!unsupported.find(), "unsupported urlpattern: %s", s);
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
 * Message containing what to include in the cache key for a request for Cloud CDN.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class CacheKeyPolicy extends com.google.api.client.json.GenericJson {

  // NOTE: generated code. Each @Key field maps 1:1 to a JSON property of the
  // Compute Engine CacheKeyPolicy resource; all setters are fluent (return
  // this) to allow chaining.

  /**
   * If true, requests to different hosts will be cached separately.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean includeHost;

  /**
   * Allows HTTP request headers (by name) to be used in the cache key.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> includeHttpHeaders;

  /**
   * Allows HTTP cookies (by name) to be used in the cache key. The name=value pair will be used in
   * the cache key Cloud CDN generates.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> includeNamedCookies;

  /**
   * If true, http and https requests will be cached separately.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean includeProtocol;

  /**
   * If true, include query string parameters in the cache key according to query_string_whitelist
   * and query_string_blacklist. If neither is set, the entire query string will be included. If
   * false, the query string will be excluded from the cache key entirely.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean includeQueryString;

  /**
   * Names of query string parameters to exclude in cache keys. All other parameters will be
   * included. Either specify query_string_whitelist or query_string_blacklist, not both. '&' and
   * '=' will be percent encoded and not treated as delimiters.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> queryStringBlacklist;

  /**
   * Names of query string parameters to include in cache keys. All other parameters will be
   * excluded. Either specify query_string_whitelist or query_string_blacklist, not both. '&' and
   * '=' will be percent encoded and not treated as delimiters.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> queryStringWhitelist;

  /**
   * If true, requests to different hosts will be cached separately.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getIncludeHost() {
    return includeHost;
  }

  /**
   * If true, requests to different hosts will be cached separately.
   * @param includeHost includeHost or {@code null} for none
   */
  public CacheKeyPolicy setIncludeHost(java.lang.Boolean includeHost) {
    this.includeHost = includeHost;
    return this;
  }

  /**
   * Allows HTTP request headers (by name) to be used in the cache key.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getIncludeHttpHeaders() {
    return includeHttpHeaders;
  }

  /**
   * Allows HTTP request headers (by name) to be used in the cache key.
   * @param includeHttpHeaders includeHttpHeaders or {@code null} for none
   */
  public CacheKeyPolicy setIncludeHttpHeaders(java.util.List<java.lang.String> includeHttpHeaders) {
    this.includeHttpHeaders = includeHttpHeaders;
    return this;
  }

  /**
   * Allows HTTP cookies (by name) to be used in the cache key. The name=value pair will be used in
   * the cache key Cloud CDN generates.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getIncludeNamedCookies() {
    return includeNamedCookies;
  }

  /**
   * Allows HTTP cookies (by name) to be used in the cache key. The name=value pair will be used in
   * the cache key Cloud CDN generates.
   * @param includeNamedCookies includeNamedCookies or {@code null} for none
   */
  public CacheKeyPolicy setIncludeNamedCookies(java.util.List<java.lang.String> includeNamedCookies) {
    this.includeNamedCookies = includeNamedCookies;
    return this;
  }

  /**
   * If true, http and https requests will be cached separately.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getIncludeProtocol() {
    return includeProtocol;
  }

  /**
   * If true, http and https requests will be cached separately.
   * @param includeProtocol includeProtocol or {@code null} for none
   */
  public CacheKeyPolicy setIncludeProtocol(java.lang.Boolean includeProtocol) {
    this.includeProtocol = includeProtocol;
    return this;
  }

  /**
   * If true, include query string parameters in the cache key according to query_string_whitelist
   * and query_string_blacklist. If neither is set, the entire query string will be included. If
   * false, the query string will be excluded from the cache key entirely.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getIncludeQueryString() {
    return includeQueryString;
  }

  /**
   * If true, include query string parameters in the cache key according to query_string_whitelist
   * and query_string_blacklist. If neither is set, the entire query string will be included. If
   * false, the query string will be excluded from the cache key entirely.
   * @param includeQueryString includeQueryString or {@code null} for none
   */
  public CacheKeyPolicy setIncludeQueryString(java.lang.Boolean includeQueryString) {
    this.includeQueryString = includeQueryString;
    return this;
  }

  /**
   * Names of query string parameters to exclude in cache keys. All other parameters will be
   * included. Either specify query_string_whitelist or query_string_blacklist, not both. '&' and
   * '=' will be percent encoded and not treated as delimiters.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getQueryStringBlacklist() {
    return queryStringBlacklist;
  }

  /**
   * Names of query string parameters to exclude in cache keys. All other parameters will be
   * included. Either specify query_string_whitelist or query_string_blacklist, not both. '&' and
   * '=' will be percent encoded and not treated as delimiters.
   * @param queryStringBlacklist queryStringBlacklist or {@code null} for none
   */
  public CacheKeyPolicy setQueryStringBlacklist(java.util.List<java.lang.String> queryStringBlacklist) {
    this.queryStringBlacklist = queryStringBlacklist;
    return this;
  }

  /**
   * Names of query string parameters to include in cache keys. All other parameters will be
   * excluded. Either specify query_string_whitelist or query_string_blacklist, not both. '&' and
   * '=' will be percent encoded and not treated as delimiters.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getQueryStringWhitelist() {
    return queryStringWhitelist;
  }

  /**
   * Names of query string parameters to include in cache keys. All other parameters will be
   * excluded. Either specify query_string_whitelist or query_string_blacklist, not both. '&' and
   * '=' will be percent encoded and not treated as delimiters.
   * @param queryStringWhitelist queryStringWhitelist or {@code null} for none
   */
  public CacheKeyPolicy setQueryStringWhitelist(java.util.List<java.lang.String> queryStringWhitelist) {
    this.queryStringWhitelist = queryStringWhitelist;
    return this;
  }

  @Override
  public CacheKeyPolicy set(String fieldName, Object value) {
    return (CacheKeyPolicy) super.set(fieldName, value);
  }

  @Override
  public CacheKeyPolicy clone() {
    return (CacheKeyPolicy) super.clone();
  }

}
| |
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
*/
package jephyr.java.util.concurrent;
import java.util.AbstractQueue;
import java.util.Collection;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import jephyr.java.util.concurrent.locks.Condition;
import jephyr.java.util.concurrent.locks.ReentrantLock;
/**
* An optionally-bounded {@linkplain BlockingQueue blocking queue} based on
* linked nodes.
* This queue orders elements FIFO (first-in-first-out).
* The <em>head</em> of the queue is that element that has been on the
* queue the longest time.
* The <em>tail</em> of the queue is that element that has been on the
* queue the shortest time. New elements
* are inserted at the tail of the queue, and the queue retrieval
* operations obtain elements at the head of the queue.
* Linked queues typically have higher throughput than array-based queues but
* less predictable performance in most concurrent applications.
*
* <p>The optional capacity bound constructor argument serves as a
* way to prevent excessive queue expansion. The capacity, if unspecified,
* is equal to {@link Integer#MAX_VALUE}. Linked nodes are
* dynamically created upon each insertion unless this would bring the
* queue above capacity.
*
* <p>This class and its iterator implement all of the
* <em>optional</em> methods of the {@link Collection} and {@link
* Iterator} interfaces.
*
* <p>This class is a member of the
* <a href="{@docRoot}/../technotes/guides/collections/index.html">
* Java Collections Framework</a>.
*
* @since 1.5
* @author Doug Lea
* @param <E> the type of elements held in this collection
*/
public class LinkedBlockingQueue<E> extends AbstractQueue<E>
implements BlockingQueue<E>, java.io.Serializable {
private static final long serialVersionUID = -6903933977591709194L;
/*
* A variant of the "two lock queue" algorithm. The putLock gates
* entry to put (and offer), and has an associated condition for
* waiting puts. Similarly for the takeLock. The "count" field
* that they both rely on is maintained as an atomic to avoid
* needing to get both locks in most cases. Also, to minimize need
* for puts to get takeLock and vice-versa, cascading notifies are
* used. When a put notices that it has enabled at least one take,
* it signals taker. That taker in turn signals others if more
* items have been entered since the signal. And symmetrically for
* takes signalling puts. Operations such as remove(Object) and
* iterators acquire both locks.
*
* Visibility between writers and readers is provided as follows:
*
* Whenever an element is enqueued, the putLock is acquired and
* count updated. A subsequent reader guarantees visibility to the
* enqueued Node by either acquiring the putLock (via fullyLock)
* or by acquiring the takeLock, and then reading n = count.get();
* this gives visibility to the first n items.
*
* To implement weakly consistent iterators, it appears we need to
* keep all Nodes GC-reachable from a predecessor dequeued Node.
* That would cause two problems:
* - allow a rogue Iterator to cause unbounded memory retention
* - cause cross-generational linking of old Nodes to new Nodes if
* a Node was tenured while live, which generational GCs have a
* hard time dealing with, causing repeated major collections.
* However, only non-deleted Nodes need to be reachable from
* dequeued Nodes, and reachability does not necessarily have to
* be of the kind understood by the GC. We use the trick of
* linking a Node that has just been dequeued to itself. Such a
* self-link implicitly means to advance to head.next.
*/
/**
* Linked list node class
*/
static class Node<E> {
    /** The queued element; nulled once the node is dequeued or removed. */
    E item;

    /**
     * One of:
     * - the real successor Node
     * - this Node, meaning the successor is head.next
     * - null, meaning there is no successor (this is the last node)
     */
    Node<E> next;

    Node(E x) { item = x; }
}
/** The capacity bound, or Integer.MAX_VALUE if none */
private final int capacity;

/** Current number of elements; atomic so readers need not take a lock. */
private final AtomicInteger count = new AtomicInteger();

/**
 * Head of linked list (a dummy node).
 * Invariant: head.item == null
 */
transient Node<E> head;

/**
 * Tail of linked list.
 * Invariant: last.next == null
 */
private transient Node<E> last;

/** Lock held by take, poll, etc */
private final ReentrantLock takeLock = new ReentrantLock();

/** Wait queue for waiting takes */
private final Condition notEmpty = takeLock.newCondition();

/** Lock held by put, offer, etc */
private final ReentrantLock putLock = new ReentrantLock();

/** Wait queue for waiting puts */
private final Condition notFull = putLock.newCondition();
/**
 * Signals a waiting take. Called only from put/offer (which do not
 * otherwise ordinarily lock takeLock.)
 */
private void signalNotEmpty() {
    // Must hold takeLock to signal its condition.
    final ReentrantLock takeLock = this.takeLock;
    takeLock.lock();
    try {
        notEmpty.signal();
    } finally {
        takeLock.unlock();
    }
}
/**
 * Signals a waiting put. Called only from take/poll.
 */
private void signalNotFull() {
    // Must hold putLock to signal its condition.
    final ReentrantLock putLock = this.putLock;
    putLock.lock();
    try {
        notFull.signal();
    } finally {
        putLock.unlock();
    }
}
/**
 * Links node at end of queue. Caller must hold putLock.
 *
 * @param node the node
 */
private void enqueue(Node<E> node) {
    // assert putLock.isHeldByCurrentThread();
    // assert last.next == null;
    last = last.next = node;
}
/**
 * Removes a node from head of queue. Caller must hold takeLock.
 *
 * @return the node's item
 */
private E dequeue() {
    // assert takeLock.isHeldByCurrentThread();
    // assert head.item == null;
    Node<E> h = head;
    Node<E> first = h.next;
    h.next = h; // help GC; self-link also tells iterators "advance to head.next"
    head = first;       // the dequeued node becomes the new dummy head
    E x = first.item;
    first.item = null;
    return x;
}
/**
 * Locks to prevent both puts and takes.
 * Lock ordering (put then take) must mirror fullyUnlock to avoid deadlock.
 */
void fullyLock() {
    putLock.lock();
    takeLock.lock();
}
/**
 * Unlocks to allow both puts and takes.
 * Released in reverse order of fullyLock.
 */
void fullyUnlock() {
    takeLock.unlock();
    putLock.unlock();
}
// /**
// * Tells whether both locks are held by current thread.
// */
// boolean isFullyLocked() {
// return (putLock.isHeldByCurrentThread() &&
// takeLock.isHeldByCurrentThread());
// }
/**
 * Creates a {@code LinkedBlockingQueue} with a capacity of
 * {@link Integer#MAX_VALUE}.
 */
public LinkedBlockingQueue() {
    this(Integer.MAX_VALUE);
}
/**
 * Creates a {@code LinkedBlockingQueue} with the given (fixed) capacity.
 *
 * @param capacity the capacity of this queue
 * @throws IllegalArgumentException if {@code capacity} is not greater
 *         than zero
 */
public LinkedBlockingQueue(int capacity) {
    if (capacity <= 0) throw new IllegalArgumentException();
    this.capacity = capacity;
    // head and last initially share a dummy node with a null item.
    last = head = new Node<E>(null);
}
/**
 * Creates a {@code LinkedBlockingQueue} with a capacity of
 * {@link Integer#MAX_VALUE}, initially containing the elements of the
 * given collection,
 * added in traversal order of the collection's iterator.
 *
 * @param c the collection of elements to initially contain
 * @throws NullPointerException if the specified collection or any
 *         of its elements are null
 */
public LinkedBlockingQueue(Collection<? extends E> c) {
    this(Integer.MAX_VALUE);
    final ReentrantLock putLock = this.putLock;
    putLock.lock(); // Never contended, but necessary for visibility
    try {
        int n = 0;
        for (E e : c) {
            if (e == null)
                throw new NullPointerException();
            if (n == capacity)
                throw new IllegalStateException("Queue full");
            enqueue(new Node<E>(e));
            ++n;
        }
        count.set(n);
    } finally {
        putLock.unlock();
    }
}
// this doc comment is overridden to remove the reference to collections
// greater in size than Integer.MAX_VALUE
/**
 * Returns the number of elements in this queue.
 * Lock-free: reads the atomic count directly.
 *
 * @return the number of elements in this queue
 */
public int size() {
    return count.get();
}
// this doc comment is a modified copy of the inherited doc comment,
// without the reference to unlimited queues.
/**
 * Returns the number of additional elements that this queue can ideally
 * (in the absence of memory or resource constraints) accept without
 * blocking. This is always equal to the initial capacity of this queue
 * less the current {@code size} of this queue.
 *
 * <p>Note that you <em>cannot</em> always tell if an attempt to insert
 * an element will succeed by inspecting {@code remainingCapacity}
 * because it may be the case that another thread is about to
 * insert or remove an element.
 */
public int remainingCapacity() {
    return capacity - count.get();
}
/**
 * Inserts the specified element at the tail of this queue, waiting if
 * necessary for space to become available.
 *
 * @throws InterruptedException {@inheritDoc}
 * @throws NullPointerException {@inheritDoc}
 */
public void put(E e) throws InterruptedException {
    if (e == null) throw new NullPointerException();
    // Note: convention in all put/take/etc is to preset local var
    // holding count negative to indicate failure unless set.
    int c = -1;
    Node<E> node = new Node<E>(e);
    final ReentrantLock putLock = this.putLock;
    final AtomicInteger count = this.count;
    putLock.lockInterruptibly();
    try {
        /*
         * Note that count is used in wait guard even though it is
         * not protected by lock. This works because count can
         * only decrease at this point (all other puts are shut
         * out by lock), and we (or some other waiting put) are
         * signalled if it ever changes from capacity. Similarly
         * for all other uses of count in other wait guards.
         */
        while (count.get() == capacity) {
            notFull.await();
        }
        enqueue(node);
        c = count.getAndIncrement(); // c is the count *before* this insert
        if (c + 1 < capacity)
            notFull.signal(); // cascading notify: still room, wake another put
    } finally {
        putLock.unlock();
    }
    if (c == 0)
        signalNotEmpty(); // queue was empty before us: takers may be waiting
}
/**
 * Inserts the specified element at the tail of this queue, waiting if
 * necessary up to the specified wait time for space to become available.
 *
 * @return {@code true} if successful, or {@code false} if
 *         the specified waiting time elapses before space is available
 * @throws InterruptedException {@inheritDoc}
 * @throws NullPointerException {@inheritDoc}
 */
public boolean offer(E e, long timeout, TimeUnit unit)
    throws InterruptedException {
    if (e == null) throw new NullPointerException();
    long nanos = unit.toNanos(timeout);
    int c = -1;
    final ReentrantLock putLock = this.putLock;
    final AtomicInteger count = this.count;
    putLock.lockInterruptibly();
    try {
        while (count.get() == capacity) {
            if (nanos <= 0)
                return false; // timed out while the queue stayed full
            nanos = notFull.awaitNanos(nanos);
        }
        enqueue(new Node<E>(e));
        c = count.getAndIncrement(); // count *before* this insert
        if (c + 1 < capacity)
            notFull.signal(); // cascading notify to other waiting puts
    } finally {
        putLock.unlock();
    }
    if (c == 0)
        signalNotEmpty(); // was empty: wake a waiting take
    return true;
}
/**
 * Inserts the specified element at the tail of this queue if it is
 * possible to do so immediately without exceeding the queue's capacity,
 * returning {@code true} upon success and {@code false} if this queue
 * is full.
 * When using a capacity-restricted queue, this method is generally
 * preferable to method {@link BlockingQueue#add add}, which can fail to
 * insert an element only by throwing an exception.
 *
 * @throws NullPointerException if the specified element is null
 */
public boolean offer(E e) {
    if (e == null) throw new NullPointerException();
    final AtomicInteger count = this.count;
    // Lock-free fast path: full queue, fail immediately.
    if (count.get() == capacity)
        return false;
    int c = -1;
    Node<E> node = new Node<E>(e);
    final ReentrantLock putLock = this.putLock;
    putLock.lock();
    try {
        // Re-check under the lock; another put may have filled the queue.
        if (count.get() < capacity) {
            enqueue(node);
            c = count.getAndIncrement(); // count *before* this insert
            if (c + 1 < capacity)
                notFull.signal();
        }
    } finally {
        putLock.unlock();
    }
    if (c == 0)
        signalNotEmpty();
    return c >= 0; // c stayed -1 only if the insert did not happen
}
/**
 * Retrieves and removes the head of this queue, waiting if necessary
 * until an element becomes available.
 *
 * @throws InterruptedException if interrupted while waiting
 */
public E take() throws InterruptedException {
    E x;
    int c = -1;
    final AtomicInteger count = this.count;
    final ReentrantLock takeLock = this.takeLock;
    takeLock.lockInterruptibly();
    try {
        while (count.get() == 0) {
            notEmpty.await();
        }
        x = dequeue();
        c = count.getAndDecrement(); // count *before* this removal
        if (c > 1)
            notEmpty.signal(); // cascading notify: more items remain
    } finally {
        takeLock.unlock();
    }
    if (c == capacity)
        signalNotFull(); // queue was full before us: puts may be waiting
    return x;
}
/**
 * Retrieves and removes the head of this queue, waiting up to the
 * specified wait time if necessary for an element to become available.
 *
 * @return the head of this queue, or {@code null} if the specified
 *         waiting time elapses before an element is available
 * @throws InterruptedException if interrupted while waiting
 */
public E poll(long timeout, TimeUnit unit) throws InterruptedException {
    E x = null;
    int c = -1;
    long nanos = unit.toNanos(timeout);
    final AtomicInteger count = this.count;
    final ReentrantLock takeLock = this.takeLock;
    takeLock.lockInterruptibly();
    try {
        while (count.get() == 0) {
            if (nanos <= 0)
                return null; // timed out while the queue stayed empty
            nanos = notEmpty.awaitNanos(nanos);
        }
        x = dequeue();
        c = count.getAndDecrement(); // count *before* this removal
        if (c > 1)
            notEmpty.signal();
    } finally {
        takeLock.unlock();
    }
    if (c == capacity)
        signalNotFull(); // was full: wake a waiting put
    return x;
}
/**
 * Retrieves and removes the head of this queue, or returns {@code null}
 * if this queue is empty.
 */
public E poll() {
    final AtomicInteger count = this.count;
    // Lock-free fast path: empty queue, nothing to do.
    if (count.get() == 0)
        return null;
    E x = null;
    int c = -1;
    final ReentrantLock takeLock = this.takeLock;
    takeLock.lock();
    try {
        // Re-check under the lock; another take may have emptied the queue.
        if (count.get() > 0) {
            x = dequeue();
            c = count.getAndDecrement(); // count *before* this removal
            if (c > 1)
                notEmpty.signal();
        }
    } finally {
        takeLock.unlock();
    }
    if (c == capacity)
        signalNotFull();
    return x;
}
/**
 * Retrieves, but does not remove, the head of this queue, or returns
 * {@code null} if this queue is empty.
 */
public E peek() {
    // Cheap lock-free check: an empty queue needs no lock at all.
    if (count.get() == 0) {
        return null;
    }
    final ReentrantLock takeLock = this.takeLock;
    takeLock.lock();
    try {
        Node<E> first = head.next;
        return (first == null) ? null : first.item;
    } finally {
        takeLock.unlock();
    }
}
/**
 * Unlinks interior Node p with predecessor trail.
 * Caller must hold both locks (called under fullyLock).
 */
void unlink(Node<E> p, Node<E> trail) {
    // assert isFullyLocked();
    // p.next is not changed, to allow iterators that are
    // traversing p to maintain their weak-consistency guarantee.
    p.item = null;
    trail.next = p.next;
    if (last == p)
        last = trail; // removed the tail: predecessor becomes the new tail
    if (count.getAndDecrement() == capacity)
        notFull.signal(); // was full: a put may be waiting
}
/**
 * Removes a single instance of the specified element from this queue,
 * if it is present. More formally, removes an element {@code e} such
 * that {@code o.equals(e)}, if this queue contains one or more such
 * elements.
 * Returns {@code true} if this queue contained the specified element
 * (or equivalently, if this queue changed as a result of the call).
 *
 * @param o element to be removed from this queue, if present
 * @return {@code true} if this queue changed as a result of the call
 */
public boolean remove(Object o) {
    if (o == null) return false;
    // Traversal mutates links, so both puts and takes must be excluded.
    fullyLock();
    try {
        for (Node<E> trail = head, p = trail.next;
             p != null;
             trail = p, p = p.next) {
            if (o.equals(p.item)) {
                unlink(p, trail);
                return true;
            }
        }
        return false;
    } finally {
        fullyUnlock();
    }
}
/**
 * Returns {@code true} if this queue contains the specified element.
 * More formally, returns {@code true} if and only if this queue contains
 * at least one element {@code e} such that {@code o.equals(e)}.
 *
 * @param o object to be checked for containment in this queue
 * @return {@code true} if this queue contains the specified element
 */
public boolean contains(Object o) {
    if (o == null) {
        return false;
    }
    // Hold both locks so the traversal sees a stable snapshot.
    fullyLock();
    try {
        Node<E> p = head.next;
        while (p != null) {
            if (o.equals(p.item)) {
                return true;
            }
            p = p.next;
        }
        return false;
    } finally {
        fullyUnlock();
    }
}
/**
 * Returns an array containing all of the elements in this queue, in
 * proper sequence.
 *
 * <p>The returned array will be "safe" in that no references to it are
 * maintained by this queue. (In other words, this method must allocate
 * a new array). The caller is thus free to modify the returned array.
 *
 * <p>This method acts as bridge between array-based and collection-based
 * APIs.
 *
 * @return an array containing all of the elements in this queue
 */
public Object[] toArray() {
    // Both locks held: count and the node chain form a consistent snapshot.
    fullyLock();
    try {
        Object[] snapshot = new Object[count.get()];
        int i = 0;
        for (Node<E> node = head.next; node != null; node = node.next) {
            snapshot[i++] = node.item;
        }
        return snapshot;
    } finally {
        fullyUnlock();
    }
}
/**
 * Returns an array containing all of the elements in this queue, in
 * proper sequence; the runtime type of the returned array is that of
 * the specified array. If the queue fits in the specified array, it
 * is returned therein. Otherwise, a new array is allocated with the
 * runtime type of the specified array and the size of this queue.
 *
 * <p>If this queue fits in the specified array with room to spare
 * (i.e., the array has more elements than this queue), the element in
 * the array immediately following the end of the queue is set to
 * {@code null}.
 *
 * <p>Like the {@link #toArray()} method, this method acts as bridge between
 * array-based and collection-based APIs. Further, this method allows
 * precise control over the runtime type of the output array, and may,
 * under certain circumstances, be used to save allocation costs.
 *
 * <p>Suppose {@code x} is a queue known to contain only strings.
 * The following code can be used to dump the queue into a newly
 * allocated array of {@code String}:
 *
 * <pre> {@code String[] y = x.toArray(new String[0]);}</pre>
 *
 * Note that {@code toArray(new Object[0])} is identical in function to
 * {@code toArray()}.
 *
 * @param a the array into which the elements of the queue are to
 *          be stored, if it is big enough; otherwise, a new array of the
 *          same runtime type is allocated for this purpose
 * @return an array containing all of the elements in this queue
 * @throws ArrayStoreException if the runtime type of the specified array
 *         is not a supertype of the runtime type of every element in
 *         this queue
 * @throws NullPointerException if the specified array is null
 */
@SuppressWarnings("unchecked")
public <T> T[] toArray(T[] a) {
    fullyLock();
    try {
        int size = count.get();
        // Too small: allocate a new array of the caller's component type.
        if (a.length < size)
            a = (T[])java.lang.reflect.Array.newInstance
                (a.getClass().getComponentType(), size);

        int k = 0;
        for (Node<E> p = head.next; p != null; p = p.next)
            a[k++] = (T)p.item;
        if (a.length > k)
            a[k] = null; // null-terminate per the Collection.toArray contract
        return a;
    } finally {
        fullyUnlock();
    }
}
/**
 * Returns a string representation of this queue in traversal order,
 * computed under both locks so it reflects a consistent snapshot.
 */
public String toString() {
    fullyLock();
    try {
        Node<E> p = head.next;
        if (p == null)
            return "[]";

        StringBuilder sb = new StringBuilder();
        sb.append('[');
        for (;;) {
            E e = p.item;
            // Guard against self-containing collections producing recursion.
            sb.append(e == this ? "(this Collection)" : e);
            p = p.next;
            if (p == null)
                return sb.append(']').toString();
            sb.append(',').append(' ');
        }
    } finally {
        fullyUnlock();
    }
}
/**
 * Atomically removes all of the elements from this queue.
 * The queue will be empty after this call returns.
 */
public void clear() {
    fullyLock();
    try {
        // Null out items and self-link every node so in-flight iterators
        // jump back to head.next, and the old chain becomes collectible.
        for (Node<E> p, h = head; (p = h.next) != null; h = p) {
            h.next = h;
            p.item = null;
        }
        head = last;
        // assert head.item == null && head.next == null;
        if (count.getAndSet(0) == capacity)
            notFull.signal(); // was full: a put may be waiting
    } finally {
        fullyUnlock();
    }
}
/**
 * Drains all available elements into the given collection.
 * Equivalent to {@code drainTo(c, Integer.MAX_VALUE)}.
 *
 * @throws UnsupportedOperationException {@inheritDoc}
 * @throws ClassCastException            {@inheritDoc}
 * @throws NullPointerException          {@inheritDoc}
 * @throws IllegalArgumentException      {@inheritDoc}
 */
public int drainTo(Collection<? super E> c) {
    return drainTo(c, Integer.MAX_VALUE);
}
/**
 * Drains at most {@code maxElements} available elements into the given
 * collection, removing them from this queue.
 *
 * @throws UnsupportedOperationException {@inheritDoc}
 * @throws ClassCastException            {@inheritDoc}
 * @throws NullPointerException          {@inheritDoc}
 * @throws IllegalArgumentException      {@inheritDoc}
 */
public int drainTo(Collection<? super E> c, int maxElements) {
    if (c == null)
        throw new NullPointerException();
    if (c == this)
        throw new IllegalArgumentException(); // cannot drain into self
    if (maxElements <= 0)
        return 0;
    boolean signalNotFull = false;
    final ReentrantLock takeLock = this.takeLock;
    takeLock.lock();
    try {
        int n = Math.min(maxElements, count.get());
        // count.get provides visibility to first n Nodes
        Node<E> h = head;
        int i = 0;
        try {
            while (i < n) {
                Node<E> p = h.next;
                c.add(p.item);
                p.item = null;
                h.next = h; // self-link drained node (GC / iterator hint)
                h = p;
                ++i;
            }
            return n;
        } finally {
            // Restore invariants even if c.add() threw
            if (i > 0) {
                // assert h.item == null;
                head = h;
                signalNotFull = (count.getAndAdd(-i) == capacity);
            }
        }
    } finally {
        takeLock.unlock();
        // Signal after releasing takeLock, as signalNotFull takes putLock.
        if (signalNotFull)
            signalNotFull();
    }
}
/**
 * Returns an iterator over the elements in this queue in proper sequence.
 * The elements will be returned in order from first (head) to last (tail).
 *
 * <p>The returned iterator is
 * <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
 *
 * @return an iterator over the elements in this queue in proper sequence
 */
public Iterator<E> iterator() {
    return new Itr();
}
private class Itr implements Iterator<E> {
    /*
     * Basic weakly-consistent iterator. At all times hold the next
     * item to hand out so that if hasNext() reports true, we will
     * still have it to return even if lost race with a take etc.
     */

    // Node holding the next element to return, or null when exhausted.
    private Node<E> current;
    // Node returned by the last next(), for remove(); null if none.
    private Node<E> lastRet;
    // Element cached from current at advance time (see class comment).
    private E currentElement;

    Itr() {
        fullyLock();
        try {
            current = head.next;
            if (current != null)
                currentElement = current.item;
        } finally {
            fullyUnlock();
        }
    }

    public boolean hasNext() {
        return current != null;
    }

    /**
     * Returns the next live successor of p, or null if no such.
     *
     * Unlike other traversal methods, iterators need to handle both:
     * - dequeued nodes (p.next == p)
     * - (possibly multiple) interior removed nodes (p.item == null)
     */
    private Node<E> nextNode(Node<E> p) {
        for (;;) {
            Node<E> s = p.next;
            if (s == p)
                return head.next; // p was dequeued; restart from the head
            if (s == null || s.item != null)
                return s;
            p = s; // skip over an interior-removed node
        }
    }

    public E next() {
        fullyLock();
        try {
            if (current == null)
                throw new NoSuchElementException();
            E x = currentElement;
            lastRet = current;
            current = nextNode(current);
            currentElement = (current == null) ? null : current.item;
            return x;
        } finally {
            fullyUnlock();
        }
    }

    public void remove() {
        if (lastRet == null)
            throw new IllegalStateException();
        fullyLock();
        try {
            Node<E> node = lastRet;
            lastRet = null;
            // Re-find the node; it may have been removed concurrently,
            // in which case this loop simply finds nothing to unlink.
            for (Node<E> trail = head, p = trail.next;
                 p != null;
                 trail = p, p = p.next) {
                if (p == node) {
                    unlink(p, trail);
                    break;
                }
            }
        } finally {
            fullyUnlock();
        }
    }
}
    /** A customized variant of Spliterators.IteratorSpliterator */
    static final class LBQSpliterator<E> implements Spliterator<E> {
        static final int MAX_BATCH = 1 << 25;  // max batch array size;
        final LinkedBlockingQueue<E> queue;
        Node<E> current;    // current node; null until initialized
        int batch;          // batch size for splits
        boolean exhausted;  // true when no more nodes
        long est;           // size estimate
        LBQSpliterator(LinkedBlockingQueue<E> queue) {
            this.queue = queue;
            this.est = queue.size();
        }
        public long estimateSize() { return est; }
        /**
         * Splits off a prefix of up to {@code batch+1} elements into an
         * array-backed spliterator; this spliterator keeps the remainder.
         * Returns null when there is nothing worth splitting.
         */
        public Spliterator<E> trySplit() {
            Node<E> h;
            final LinkedBlockingQueue<E> q = this.queue;
            int b = batch;
            // Batch size grows arithmetically, capped at MAX_BATCH.
            int n = (b <= 0) ? 1 : (b >= MAX_BATCH) ? MAX_BATCH : b + 1;
            if (!exhausted &&
                ((h = current) != null || (h = q.head.next) != null) &&
                h.next != null) {
                Object[] a = new Object[n];
                int i = 0;
                Node<E> p = current;
                q.fullyLock();
                try {
                    if (p != null || (p = q.head.next) != null) {
                        do {
                            // Skip interior-removed nodes (null item).
                            if ((a[i] = p.item) != null)
                                ++i;
                        } while ((p = p.next) != null && i < n);
                    }
                } finally {
                    q.fullyUnlock();
                }
                if ((current = p) == null) {
                    est = 0L;
                    exhausted = true;
                }
                else if ((est -= i) < 0L)
                    est = 0L;
                if (i > 0) {
                    batch = i;
                    return Spliterators.spliterator
                        (a, 0, i, Spliterator.ORDERED | Spliterator.NONNULL |
                         Spliterator.CONCURRENT);
                }
            }
            return null;
        }
        /**
         * Applies the action to every remaining element. Locks are taken per
         * element (not across the whole traversal), and the action itself is
         * invoked outside the locks.
         */
        public void forEachRemaining(Consumer<? super E> action) {
            if (action == null) throw new NullPointerException();
            final LinkedBlockingQueue<E> q = this.queue;
            if (!exhausted) {
                exhausted = true;
                Node<E> p = current;
                do {
                    E e = null;
                    q.fullyLock();
                    try {
                        if (p == null)
                            p = q.head.next;
                        // Advance past removed (null-item) nodes to the next
                        // live element.
                        while (p != null) {
                            e = p.item;
                            p = p.next;
                            if (e != null)
                                break;
                        }
                    } finally {
                        q.fullyUnlock();
                    }
                    if (e != null)
                        action.accept(e);
                } while (p != null);
            }
        }
        /**
         * Applies the action to the next element, if any. Returns true if an
         * element was consumed.
         */
        public boolean tryAdvance(Consumer<? super E> action) {
            if (action == null) throw new NullPointerException();
            final LinkedBlockingQueue<E> q = this.queue;
            if (!exhausted) {
                E e = null;
                q.fullyLock();
                try {
                    if (current == null)
                        current = q.head.next;
                    while (current != null) {
                        e = current.item;
                        current = current.next;
                        if (e != null)
                            break;
                    }
                } finally {
                    q.fullyUnlock();
                }
                if (current == null)
                    exhausted = true;
                if (e != null) {
                    // Invoke the action outside the locks.
                    action.accept(e);
                    return true;
                }
            }
            return false;
        }
        public int characteristics() {
            return Spliterator.ORDERED | Spliterator.NONNULL |
                Spliterator.CONCURRENT;
        }
    }
/**
* Returns a {@link Spliterator} over the elements in this queue.
*
* <p>The returned spliterator is
* <a href="package-summary.html#Weakly"><i>weakly consistent</i></a>.
*
* <p>The {@code Spliterator} reports {@link Spliterator#CONCURRENT},
* {@link Spliterator#ORDERED}, and {@link Spliterator#NONNULL}.
*
* @implNote
* The {@code Spliterator} implements {@code trySplit} to permit limited
* parallelism.
*
* @return a {@code Spliterator} over the elements in this queue
* @since 1.8
*/
public Spliterator<E> spliterator() {
return new LBQSpliterator<E>(this);
}
/**
* Saves this queue to a stream (that is, serializes it).
*
* @param s the stream
* @throws java.io.IOException if an I/O error occurs
* @serialData The capacity is emitted (int), followed by all of
* its elements (each an {@code Object}) in the proper order,
* followed by a null
*/
private void writeObject(java.io.ObjectOutputStream s)
throws java.io.IOException {
fullyLock();
try {
// Write out any hidden stuff, plus capacity
s.defaultWriteObject();
// Write out all elements in the proper order.
for (Node<E> p = head.next; p != null; p = p.next)
s.writeObject(p.item);
// Use trailing null as sentinel
s.writeObject(null);
} finally {
fullyUnlock();
}
}
/**
* Reconstitutes this queue from a stream (that is, deserializes it).
* @param s the stream
* @throws ClassNotFoundException if the class of a serialized object
* could not be found
* @throws java.io.IOException if an I/O error occurs
*/
private void readObject(java.io.ObjectInputStream s)
throws java.io.IOException, ClassNotFoundException {
// Read in capacity, and any hidden stuff
s.defaultReadObject();
count.set(0);
last = head = new Node<E>(null);
// Read in all elements and place in queue
for (;;) {
@SuppressWarnings("unchecked")
E item = (E)s.readObject();
if (item == null)
break;
add(item);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier.sockets;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.net.ConnectException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.mockito.Mockito;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.client.Pool;
import org.apache.geode.cache.client.PoolManager;
import org.apache.geode.cache.client.internal.PoolImpl;
import org.apache.geode.cache.query.CqAttributesFactory;
import org.apache.geode.cache.query.CqListener;
import org.apache.geode.cache.query.CqQuery;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.Version;
import org.apache.geode.internal.cache.EventID;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.NetworkUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.junit.categories.BackwardCompatibilityTest;
import org.apache.geode.test.junit.categories.ClientServerTest;
import org.apache.geode.test.junit.runners.CategoryWithParameterizedRunnerFactory;
import org.apache.geode.test.version.VersionManager;
/**
 * Backward-compatibility variant of {@code ClientServerMiscDUnitTestBase}.
 * Each run is parameterized with a previously-released Geode version and mixes
 * old- and current-version members (clients, servers, peer feeds) to verify
 * client/server messaging, subscription failover, and membership-ID encoding
 * across versions.
 */
@Category({ClientServerTest.class, BackwardCompatibilityTest.class})
@RunWith(Parameterized.class)
@Parameterized.UseParametersRunnerFactory(CategoryWithParameterizedRunnerFactory.class)
public class ClientServerMiscBCDUnitTest extends ClientServerMiscDUnitTestBase {
  // One test run per previously-released Geode version installed on this machine.
  @Parameterized.Parameters
  public static Collection<String> data() {
    List<String> result = VersionManager.getInstance().getVersionsWithoutCurrent();
    if (result.size() < 1) {
      throw new RuntimeException("No older versions of Geode were found to test against");
    } else {
      System.out.println("running against these versions: " + result);
    }
    return result;
  }
  public ClientServerMiscBCDUnitTest(String version) {
    super();
    // testVersion is consumed by the base class when launching versioned VMs.
    testVersion = version;
  }
  @Override
  void createClientCacheAndVerifyPingIntervalIsSet(String host, int port) throws Exception {
    // this functionality was introduced in 1.5. If we let the test run in older
    // clients it will throw a NoSuchMethodError
    if (Version.CURRENT_ORDINAL >= 80 /* GEODE_1_5_0 */) {
      super.createClientCacheAndVerifyPingIntervalIsSet(host, port);
    }
  }
  /**
   * Two old-version clients subscribe to a current-version server; updates from
   * one client must reach the other and carry a member ID (GEODE-2954).
   */
  @Test
  public void testSubscriptionWithCurrentServerAndOldClients() throws Exception {
    // start server first
    int serverPort = initServerCache(true);
    VM client1 = Host.getHost(0).getVM(testVersion, 1);
    VM client2 = Host.getHost(0).getVM(testVersion, 3);
    String hostname = NetworkUtils.getServerHostName(Host.getHost(0));
    client1.invoke("create client1 cache", () -> {
      createClientCache(hostname, serverPort);
      populateCache();
      registerInterest();
    });
    client2.invoke("create client2 cache", () -> {
      Pool ignore = createClientCache(hostname, serverPort);
    });
    client2.invoke("putting data in client2", () -> putForClient());
    // client1 will receive client2's updates asynchronously
    client1.invoke(() -> {
      Region r2 = getCache().getRegion(REGION_NAME2);
      MemberIDVerifier verifier = (MemberIDVerifier) ((LocalRegion) r2).getCacheListener();
      await().until(() -> verifier.eventReceived);
    });
    // client2's update should have included a memberID - GEODE-2954
    client1.invoke(() -> {
      Region r2 = getCache().getRegion(REGION_NAME2);
      MemberIDVerifier verifier = (MemberIDVerifier) ((LocalRegion) r2).getCacheListener();
      assertFalse(verifier.memberIDNotReceived);
    });
  }
  @Test
  public void testSubscriptionWithMixedServersAndNewPeerFeed() throws Exception {
    doTestSubscriptionWithMixedServersAndPeerFeed(VersionManager.CURRENT_VERSION, true);
  }
  @Test
  public void testSubscriptionWithMixedServersAndOldPeerFeed() throws Exception {
    doTestSubscriptionWithMixedServersAndPeerFeed(testVersion, true);
  }
  @Test
  public void testSubscriptionWithMixedServersAndOldClientFeed() throws Exception {
    doTestSubscriptionWithMixedServersAndPeerFeed(testVersion, false);
  }
  /**
   * Shared body for the mixed-server subscription tests: a client subscribes
   * through three servers (one old, two current), the feed pushes updates,
   * then servers are shut down one at a time. After each failover the client
   * must see no duplicate events and its thread-id-to-sequence-id map must
   * grow as expected.
   *
   * @param version Geode version for the feeder VM
   * @param usePeerForFeed when false the feeder runs as a client of server1
   *        instead of a peer member
   */
  private void doTestSubscriptionWithMixedServersAndPeerFeed(String version,
      boolean usePeerForFeed) {
    server1 = Host.getHost(0).getVM(testVersion, 2);
    server2 = Host.getHost(0).getVM(VersionManager.CURRENT_VERSION, 3);
    VM server3 = Host.getHost(0).getVM(VersionManager.CURRENT_VERSION, 4);
    VM interestClient = Host.getHost(0).getVM(testVersion, 0);
    VM feeder = Host.getHost(0).getVM(version, 1);
    // start servers first
    int server1Port = initServerCache(true);
    int server2Port = initServerCache2();
    int server3Port = server3.invoke(() -> createServerCache(true, getMaxThreads(), false));
    System.out.println("old server is vm 2 and new server is vm 3");
    System.out
        .println("old server port is " + server1Port + " and new server port is " + server2Port);
    String hostname = NetworkUtils.getServerHostName(Host.getHost(0));
    interestClient.invoke("create interestClient cache", () -> {
      createClientCache(hostname, 300000, false, server1Port, server2Port, server3Port);
      populateCache();
      registerInterest();
    });
    if (!usePeerForFeed) {
      feeder.invoke("create client cache for feed", () -> {
        Pool ignore = createClientCache(hostname, server1Port);
      });
    }
    feeder.invoke("putting data in feeder", () -> putForClient());
    // interestClient will receive feeder's updates asynchronously
    interestClient.invoke("verification 1", () -> {
      Region r2 = getCache().getRegion(REGION_NAME2);
      MemberIDVerifier verifier = (MemberIDVerifier) ((LocalRegion) r2).getCacheListener();
      await().until(() -> verifier.eventReceived);
      verifier.reset();
    });
    server1.invoke("shutdown old server", () -> {
      getCache().getDistributedSystem().disconnect();
    });
    server2.invoke("wait for failover queue to drain", () -> {
      CacheClientProxy proxy =
          CacheClientNotifier.getInstance().getClientProxies().iterator().next();
      await()
          .until(() -> proxy.getHARegionQueue().isEmpty());
    });
    // the client should now get duplicate events from the current-version server
    interestClient.invoke("verification 2", () -> {
      Cache cache = getCache();
      Region r2 = cache.getRegion(REGION_NAME2);
      MemberIDVerifier verifier = (MemberIDVerifier) ((LocalRegion) r2).getCacheListener();
      assertFalse(verifier.eventReceived); // no duplicate events should have arrived
      PoolImpl pool = (PoolImpl) PoolManager.find("ClientServerMiscDUnitTestPool");
      Map seqMap = pool.getThreadIdToSequenceIdMap();
      assertEquals(3, seqMap.size()); // one for each server and one for the feed
      verifier.reset();
    });
    server2.invoke("shutdown new server", () -> {
      getCache().getDistributedSystem().disconnect();
    });
    server3.invoke("wait for failover queue to drain", () -> {
      CacheClientProxy proxy =
          CacheClientNotifier.getInstance().getClientProxies().iterator().next();
      await()
          .until(() -> proxy.getHARegionQueue().isEmpty());
    });
    // the client should now get duplicate events from the current-version server
    interestClient.invoke("verification 3", () -> {
      Cache cache = getCache();
      Region r2 = cache.getRegion(REGION_NAME2);
      MemberIDVerifier verifier = (MemberIDVerifier) ((LocalRegion) r2).getCacheListener();
      assertFalse(verifier.eventReceived); // no duplicate events should have arrived
      PoolImpl pool = (PoolImpl) PoolManager.find("ClientServerMiscDUnitTestPool");
      Map seqMap = pool.getThreadIdToSequenceIdMap();
      assertEquals(4, seqMap.size()); // one for each server and one for the feed
    });
  }
  @Test
  public void giiEventQueueFromOldToCurrentMemberShouldSucceed() {
    giiEventQueueShouldSucceedWithMixedVersions(testVersion, VersionManager.CURRENT_VERSION);
  }
  @Test
  public void giiEventQueueFromCurrentToOldMemberShouldSucceed() {
    // ConnectException is expected while the old member restarts.
    final IgnoredException expectedEx =
        IgnoredException.addIgnoredException(ConnectException.class.getName());
    giiEventQueueShouldSucceedWithMixedVersions(VersionManager.CURRENT_VERSION, testVersion);
    expectedEx.remove();
  }
  /**
   * Shared body for the GII (get-initial-image) tests: server2 starts with its
   * cache server stopped, the feed populates events, then server2's cache
   * server is started and must receive a copy of the client's HA event queue
   * from server1. Shutting down server1 then forces the client to fail over.
   */
  public void giiEventQueueShouldSucceedWithMixedVersions(String server1Version,
      String server2Version) {
    VM interestClient = Host.getHost(0).getVM(testVersion, 0);
    VM feeder = Host.getHost(0).getVM(VersionManager.CURRENT_VERSION, 1);
    server1 = Host.getHost(0).getVM(server1Version, 2);
    server2 = Host.getHost(0).getVM(server2Version, 3);
    // start servers first
    int server1Port = initServerCache(true, server1, true);
    int server2Port = initServerCache(true, server2, true);
    server2.invoke(() -> {
      getCache().getCacheServers().stream().forEach(CacheServer::stop);
    });
    String hostname = NetworkUtils.getServerHostName(Host.getHost(0));
    interestClient.invoke("create interestClient cache", () -> {
      createClientCache(hostname, 300000, false, server1Port, server2Port);
      registerInterest();
      registerCQ();
    });
    feeder.invoke("putting data in feeder", () -> putForClient());
    // Start server 2
    server2.invoke(() -> {
      for (CacheServer server : getCache().getCacheServers()) {
        server.start();
      }
    });
    // Make sure server 2 copies the queue
    server2.invoke(() -> {
      await().untilAsserted(() -> {
        final Collection<CacheClientProxy> clientProxies =
            CacheClientNotifier.getInstance().getClientProxies();
        assertFalse(clientProxies.isEmpty());
        CacheClientProxy proxy = clientProxies.iterator().next();
        assertFalse(proxy.getHARegionQueue().isEmpty());
      });
    });
    // interestClient will receive feeder's updates asynchronously
    interestClient.invoke("verification 1", () -> {
      Region r2 = getCache().getRegion(REGION_NAME2);
      MemberIDVerifier verifier = (MemberIDVerifier) ((LocalRegion) r2).getCacheListener();
      await().until(() -> verifier.eventReceived);
      verifier.reset();
    });
    server1.invoke("shutdown old server", () -> {
      getCache().getDistributedSystem().disconnect();
    });
    server2.invoke("wait for failover queue to drain", () -> {
      CacheClientProxy proxy =
          CacheClientNotifier.getInstance().getClientProxies().iterator().next();
      await()
          .until(() -> proxy.getHARegionQueue().isEmpty());
    });
  }
  // Registers a continuous query on REGION_NAME2 with a mock listener; runs
  // inside a client VM via invoke(), hence static.
  public static void registerCQ() throws Exception {
    Cache cache = new ClientServerMiscDUnitTestBase().getCache();
    Region r = cache.getRegion(Region.SEPARATOR + REGION_NAME2);
    assertNotNull(r);
    CqAttributesFactory cqAttributesFactory = new CqAttributesFactory();
    cqAttributesFactory.addCqListener(Mockito.mock(CqListener.class));
    final CqQuery cq = cache.getQueryService().newCq("testCQ", "select * from " + r.getFullPath(),
        cqAttributesFactory.create());
    cq.execute();
  }
  /**
   * The serialized membership-ID bytes computed on an old-version client must
   * match the bytes the current-version server computes for that client.
   */
  @Test
  public void testDistributedMemberBytesWithCurrentServerAndOldClient() throws Exception {
    // Start current version server
    int serverPort = initServerCache(true);
    // Start old version client and do puts
    VM client = Host.getHost(0).getVM(testVersion, 1);
    String hostname = NetworkUtils.getServerHostName(Host.getHost(0));
    client.invoke("create client cache", () -> {
      createClientCache(hostname, serverPort);
      populateCache();
    });
    // Get client member id byte array on client
    byte[] clientMembershipIdBytesOnClient =
        client.invoke(() -> getClientMembershipIdBytesOnClient());
    // Get client member id byte array on server
    byte[] clientMembershipIdBytesOnServer =
        server1.invoke(() -> getClientMembershipIdBytesOnServer());
    // Verify member id bytes on client and server are equal
    String complaint = "size on client=" + clientMembershipIdBytesOnClient.length
        + "; size on server=" + clientMembershipIdBytesOnServer.length + "\nclient bytes="
        + Arrays.toString(clientMembershipIdBytesOnClient) + "\nserver bytes="
        + Arrays.toString(clientMembershipIdBytesOnServer);
    assertTrue(complaint,
        Arrays.equals(clientMembershipIdBytesOnClient, clientMembershipIdBytesOnServer));
  }
  // Runs in the client VM: serializes this member's own proxy membership ID.
  private byte[] getClientMembershipIdBytesOnClient() {
    DistributedSystem system = getCache().getDistributedSystem();
    byte[] result =
        EventID.getMembershipId(new ClientProxyMembershipID(system.getDistributedMember()));
    System.out.println("client ID bytes are " + Arrays.toString(result));
    return result;
  }
  // Runs in the server VM: serializes the single connected client's membership
  // ID as recorded by the ClientHealthMonitor.
  private byte[] getClientMembershipIdBytesOnServer() {
    Set cpmIds = ClientHealthMonitor.getInstance().getClientHeartbeats().keySet();
    assertEquals(1, cpmIds.size());
    ClientProxyMembershipID cpmId = (ClientProxyMembershipID) cpmIds.iterator().next();
    System.out.println("client ID on server is " + cpmId.getDistributedMember());
    byte[] result = EventID.getMembershipId(cpmId);
    System.out.println("client ID bytes are " + Arrays.toString(result));
    return result;
  }
}
| |
/*
* Copyright (C) 2014 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package okhttp3;
import java.io.IOException;
import java.util.Arrays;
import java.util.Locale;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import okhttp3.mockwebserver.RecordedRequest;
import okhttp3.mockwebserver.SocketPolicy;
import okio.Buffer;
import okio.BufferedSink;
import okio.ForwardingSink;
import okio.ForwardingSource;
import okio.GzipSink;
import okio.Okio;
import okio.Sink;
import okio.Source;
import org.junit.Rule;
import org.junit.Test;
import static okhttp3.TestUtil.defaultClient;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;
public final class InterceptorTest {
  // In-process HTTP server; JUnit starts/stops it around each test.
  @Rule public MockWebServer server = new MockWebServer();
  // Rebuilt per test via newBuilder() to install the interceptor under test.
  private OkHttpClient client = defaultClient();
  // Captures async responses for the enqueue()-based tests.
  private RecordingCallback callback = new RecordingCallback();
@Test public void applicationInterceptorsCanShortCircuitResponses() throws Exception {
server.shutdown(); // Accept no connections.
Request request = new Request.Builder()
.url("https://localhost:1/")
.build();
final Response interceptorResponse = new Response.Builder()
.request(request)
.protocol(Protocol.HTTP_1_1)
.code(200)
.message("Intercepted!")
.body(ResponseBody.create(MediaType.parse("text/plain; charset=utf-8"), "abc"))
.build();
client = client.newBuilder()
.addInterceptor(new Interceptor() {
@Override public Response intercept(Chain chain) throws IOException {
return interceptorResponse;
}
}).build();
Response response = client.newCall(request).execute();
assertSame(interceptorResponse, response);
}
  /**
   * A network interceptor must call Chain.proceed() exactly once; returning a
   * fabricated response without proceeding fails with IllegalStateException.
   */
  @Test public void networkInterceptorsCannotShortCircuitResponses() throws Exception {
    server.enqueue(new MockResponse().setResponseCode(500));
    Interceptor interceptor = new Interceptor() {
      @Override public Response intercept(Chain chain) throws IOException {
        // Never calls chain.proceed() — the violation under test.
        return new Response.Builder()
            .request(chain.request())
            .protocol(Protocol.HTTP_1_1)
            .code(200)
            .message("Intercepted!")
            .body(ResponseBody.create(MediaType.parse("text/plain; charset=utf-8"), "abc"))
            .build();
      }
    };
    client = client.newBuilder()
        .addNetworkInterceptor(interceptor)
        .build();
    Request request = new Request.Builder()
        .url(server.url("/"))
        .build();
    try {
      client.newCall(request).execute();
      fail();
    } catch (IllegalStateException expected) {
      assertEquals("network interceptor " + interceptor + " must call proceed() exactly once",
          expected.getMessage());
    }
  }
  /**
   * Calling Chain.proceed() a second time from a network interceptor is
   * rejected with IllegalStateException.
   */
  @Test public void networkInterceptorsCannotCallProceedMultipleTimes() throws Exception {
    server.enqueue(new MockResponse());
    server.enqueue(new MockResponse());
    Interceptor interceptor = new Interceptor() {
      @Override public Response intercept(Chain chain) throws IOException {
        chain.proceed(chain.request());
        // The second proceed() is the violation under test.
        return chain.proceed(chain.request());
      }
    };
    client = client.newBuilder()
        .addNetworkInterceptor(interceptor)
        .build();
    Request request = new Request.Builder()
        .url(server.url("/"))
        .build();
    try {
      client.newCall(request).execute();
      fail();
    } catch (IllegalStateException expected) {
      assertEquals("network interceptor " + interceptor + " must call proceed() exactly once",
          expected.getMessage());
    }
  }
  /**
   * A network interceptor may not retarget the request to a different
   * host/port — the connection is already established — and doing so fails
   * with IllegalStateException.
   */
  @Test public void networkInterceptorsCannotChangeServerAddress() throws Exception {
    server.enqueue(new MockResponse().setResponseCode(500));
    Interceptor interceptor = new Interceptor() {
      @Override public Response intercept(Chain chain) throws IOException {
        Address address = chain.connection().route().address();
        String sameHost = address.url().host();
        // Same host, different port: still an address change.
        int differentPort = address.url().port() + 1;
        return chain.proceed(chain.request().newBuilder()
            .url(HttpUrl.parse("http://" + sameHost + ":" + differentPort + "/"))
            .build());
      }
    };
    client = client.newBuilder()
        .addNetworkInterceptor(interceptor)
        .build();
    Request request = new Request.Builder()
        .url(server.url("/"))
        .build();
    try {
      client.newCall(request).execute();
      fail();
    } catch (IllegalStateException expected) {
      assertEquals("network interceptor " + interceptor + " must retain the same host and port",
          expected.getMessage());
    }
  }
@Test public void networkInterceptorsHaveConnectionAccess() throws Exception {
server.enqueue(new MockResponse());
Interceptor interceptor = new Interceptor() {
@Override public Response intercept(Chain chain) throws IOException {
Connection connection = chain.connection();
assertNotNull(connection);
return chain.proceed(chain.request());
}
};
client = client.newBuilder()
.addNetworkInterceptor(interceptor)
.build();
Request request = new Request.Builder()
.url(server.url("/"))
.build();
client.newCall(request).execute();
}
@Test public void networkInterceptorsObserveNetworkHeaders() throws Exception {
server.enqueue(new MockResponse()
.setBody(gzip("abcabcabc"))
.addHeader("Content-Encoding: gzip"));
Interceptor interceptor = new Interceptor() {
@Override public Response intercept(Chain chain) throws IOException {
// The network request has everything: User-Agent, Host, Accept-Encoding.
Request networkRequest = chain.request();
assertNotNull(networkRequest.header("User-Agent"));
assertEquals(server.getHostName() + ":" + server.getPort(),
networkRequest.header("Host"));
assertNotNull(networkRequest.header("Accept-Encoding"));
// The network response also has everything, including the raw gzipped content.
Response networkResponse = chain.proceed(networkRequest);
assertEquals("gzip", networkResponse.header("Content-Encoding"));
return networkResponse;
}
};
client = client.newBuilder()
.addNetworkInterceptor(interceptor)
.build();
Request request = new Request.Builder()
.url(server.url("/"))
.build();
// No extra headers in the application's request.
assertNull(request.header("User-Agent"));
assertNull(request.header("Host"));
assertNull(request.header("Accept-Encoding"));
// No extra headers in the application's response.
Response response = client.newCall(request).execute();
assertNull(request.header("Content-Encoding"));
assertEquals("abcabcabc", response.body().string());
}
  /**
   * A network interceptor may rewrite the request method and body (here
   * GET -> POST with a text body); the server receives the rewritten request.
   */
  @Test public void networkInterceptorsCanChangeRequestMethodFromGetToPost() throws Exception {
    server.enqueue(new MockResponse());
    Interceptor interceptor = new Interceptor() {
      @Override public Response intercept(Chain chain) throws IOException {
        Request originalRequest = chain.request();
        MediaType mediaType = MediaType.parse("text/plain");
        RequestBody body = RequestBody.create(mediaType, "abc");
        // Headers are set explicitly since the rewritten body changes them.
        return chain.proceed(originalRequest.newBuilder()
            .method("POST", body)
            .header("Content-Type", mediaType.toString())
            .header("Content-Length", Long.toString(body.contentLength()))
            .build());
      }
    };
    client = client.newBuilder()
        .addNetworkInterceptor(interceptor)
        .build();
    Request request = new Request.Builder()
        .url(server.url("/"))
        .get()
        .build();
    client.newCall(request).execute();
    RecordedRequest recordedRequest = server.takeRequest();
    assertEquals("POST", recordedRequest.getMethod());
    assertEquals("abc", recordedRequest.getBody().readUtf8());
  }
  // Request rewriting works identically for application interceptors...
  @Test public void applicationInterceptorsRewriteRequestToServer() throws Exception {
    rewriteRequestToServer(false);
  }
  // ...and for network interceptors.
  @Test public void networkInterceptorsRewriteRequestToServer() throws Exception {
    rewriteRequestToServer(true);
  }
  /**
   * Shared body: an interceptor rewrites the outgoing PUT into a POST with an
   * uppercased body and an extra header. The server must see the rewritten
   * method, body, and added header, while original headers are preserved.
   *
   * @param network true to install as a network interceptor, false for an
   *        application interceptor
   */
  private void rewriteRequestToServer(boolean network) throws Exception {
    server.enqueue(new MockResponse());
    addInterceptor(network, new Interceptor() {
      @Override public Response intercept(Chain chain) throws IOException {
        Request originalRequest = chain.request();
        return chain.proceed(originalRequest.newBuilder()
            .method("POST", uppercase(originalRequest.body()))
            .addHeader("OkHttp-Intercepted", "yep")
            .build());
      }
    });
    Request request = new Request.Builder()
        .url(server.url("/"))
        .addHeader("Original-Header", "foo")
        .method("PUT", RequestBody.create(MediaType.parse("text/plain"), "abc"))
        .build();
    client.newCall(request).execute();
    RecordedRequest recordedRequest = server.takeRequest();
    assertEquals("ABC", recordedRequest.getBody().readUtf8());
    assertEquals("foo", recordedRequest.getHeader("Original-Header"));
    assertEquals("yep", recordedRequest.getHeader("OkHttp-Intercepted"));
    assertEquals("POST", recordedRequest.getMethod());
  }
  // Response rewriting works identically for application interceptors...
  @Test public void applicationInterceptorsRewriteResponseFromServer() throws Exception {
    rewriteResponseFromServer(false);
  }
  // ...and for network interceptors.
  @Test public void networkInterceptorsRewriteResponseFromServer() throws Exception {
    rewriteResponseFromServer(true);
  }
  /**
   * Shared body: an interceptor rewrites the incoming response (uppercased
   * body, extra header); the caller sees the rewritten response with the
   * server's original headers preserved.
   *
   * @param network true to install as a network interceptor, false for an
   *        application interceptor
   */
  private void rewriteResponseFromServer(boolean network) throws Exception {
    server.enqueue(new MockResponse()
        .addHeader("Original-Header: foo")
        .setBody("abc"));
    addInterceptor(network, new Interceptor() {
      @Override public Response intercept(Chain chain) throws IOException {
        Response originalResponse = chain.proceed(chain.request());
        return originalResponse.newBuilder()
            .body(uppercase(originalResponse.body()))
            .addHeader("OkHttp-Intercepted", "yep")
            .build();
      }
    });
    Request request = new Request.Builder()
        .url(server.url("/"))
        .build();
    Response response = client.newCall(request).execute();
    assertEquals("ABC", response.body().string());
    assertEquals("yep", response.header("OkHttp-Intercepted"));
    assertEquals("foo", response.header("Original-Header"));
  }
  // Interceptor chaining order is the same for application interceptors...
  @Test public void multipleApplicationInterceptors() throws Exception {
    multipleInterceptors(false);
  }
  // ...and for network interceptors.
  @Test public void multipleNetworkInterceptors() throws Exception {
    multipleInterceptors(true);
  }
  /**
   * Shared body: two interceptors run in registration order on the way out
   * and in reverse order on the way back, so request headers accumulate
   * first-to-last while response headers accumulate last-to-first.
   */
  private void multipleInterceptors(boolean network) throws Exception {
    server.enqueue(new MockResponse());
    addInterceptor(network, new Interceptor() {
      @Override public Response intercept(Chain chain) throws IOException {
        Request originalRequest = chain.request();
        Response originalResponse = chain.proceed(originalRequest.newBuilder()
            .addHeader("Request-Interceptor", "Android") // 1. Added first.
            .build());
        return originalResponse.newBuilder()
            .addHeader("Response-Interceptor", "Donut") // 4. Added last.
            .build();
      }
    });
    addInterceptor(network, new Interceptor() {
      @Override public Response intercept(Chain chain) throws IOException {
        Request originalRequest = chain.request();
        Response originalResponse = chain.proceed(originalRequest.newBuilder()
            .addHeader("Request-Interceptor", "Bob") // 2. Added second.
            .build());
        return originalResponse.newBuilder()
            .addHeader("Response-Interceptor", "Cupcake") // 3. Added third.
            .build();
      }
    });
    Request request = new Request.Builder()
        .url(server.url("/"))
        .build();
    Response response = client.newCall(request).execute();
    assertEquals(Arrays.asList("Cupcake", "Donut"),
        response.headers("Response-Interceptor"));
    RecordedRequest recordedRequest = server.takeRequest();
    assertEquals(Arrays.asList("Android", "Bob"),
        recordedRequest.getHeaders().values("Request-Interceptor"));
  }
  // Interceptors apply to asynchronous calls too: application interceptors...
  @Test public void asyncApplicationInterceptors() throws Exception {
    asyncInterceptors(false);
  }
  // ...and network interceptors.
  @Test public void asyncNetworkInterceptors() throws Exception {
    asyncInterceptors(true);
  }
  /**
   * Shared body: an interceptor's response rewrite is also visible to
   * asynchronous (enqueue) callers.
   */
  private void asyncInterceptors(boolean network) throws Exception {
    server.enqueue(new MockResponse());
    addInterceptor(network, new Interceptor() {
      @Override public Response intercept(Chain chain) throws IOException {
        Response originalResponse = chain.proceed(chain.request());
        return originalResponse.newBuilder()
            .addHeader("OkHttp-Intercepted", "yep")
            .build();
      }
    });
    Request request = new Request.Builder()
        .url(server.url("/"))
        .build();
    client.newCall(request).enqueue(callback);
    // await() blocks until the async callback fires.
    callback.await(request.url())
        .assertCode(200)
        .assertHeader("OkHttp-Intercepted", "yep");
  }
@Test public void applicationInterceptorsCanMakeMultipleRequestsToServer() throws Exception {
server.enqueue(new MockResponse().setBody("a"));
server.enqueue(new MockResponse().setBody("b"));
client = client.newBuilder()
.addInterceptor(new Interceptor() {
@Override public Response intercept(Chain chain) throws IOException {
Response response1 = chain.proceed(chain.request());
response1.body().close();
return chain.proceed(chain.request());
}
}).build();
Request request = new Request.Builder()
.url(server.url("/"))
.build();
Response response = client.newCall(request).execute();
assertEquals(response.body().string(), "b");
}
  /** Make sure interceptors can interact with the OkHttp client. */
  @Test public void interceptorMakesAnUnrelatedRequest() throws Exception {
    server.enqueue(new MockResponse().setBody("a")); // Fetched by interceptor.
    server.enqueue(new MockResponse().setBody("b")); // Fetched directly.
    client = client.newBuilder()
        .addInterceptor(new Interceptor() {
          @Override public Response intercept(Chain chain) throws IOException {
            // Only the /b call triggers the nested request, avoiding recursion.
            if (chain.request().url().encodedPath().equals("/b")) {
              Request requestA = new Request.Builder()
                  .url(server.url("/a"))
                  .build();
              // Synchronous re-entrant call on the same client from inside
              // the interceptor.
              Response responseA = client.newCall(requestA).execute();
              assertEquals("a", responseA.body().string());
            }
            return chain.proceed(chain.request());
          }
        }).build();
    Request requestB = new Request.Builder()
        .url(server.url("/b"))
        .build();
    Response responseB = client.newCall(requestB).execute();
    assertEquals("b", responseB.body().string());
  }
  /** Make sure interceptors can interact with the OkHttp client asynchronously. */
  @Test public void interceptorMakesAnUnrelatedAsyncRequest() throws Exception {
    server.enqueue(new MockResponse().setBody("a")); // Fetched by interceptor.
    server.enqueue(new MockResponse().setBody("b")); // Fetched directly.
    client = client.newBuilder()
        .addInterceptor(new Interceptor() {
          @Override public Response intercept(Chain chain) throws IOException {
            // Only the /b call triggers the nested request, avoiding recursion.
            if (chain.request().url().encodedPath().equals("/b")) {
              Request requestA = new Request.Builder()
                  .url(server.url("/a"))
                  .build();
              try {
                // Asynchronous nested call; await() blocks for its completion.
                RecordingCallback callbackA = new RecordingCallback();
                client.newCall(requestA).enqueue(callbackA);
                callbackA.await(requestA.url()).assertBody("a");
              } catch (Exception e) {
                // intercept() may only throw IOException; wrap anything else.
                throw new RuntimeException(e);
              }
            }
            return chain.proceed(chain.request());
          }
        }).build();
    Request requestB = new Request.Builder()
        .url(server.url("/b"))
        .build();
    RecordingCallback callbackB = new RecordingCallback();
    client.newCall(requestB).enqueue(callbackB);
    callbackB.await(requestB.url()).assertBody("b");
  }
// Application and network interceptors share the same failure behavior for synchronous calls;
// these two tests drive the common helper once with each interceptor type.
@Test public void applicationInterceptorThrowsRuntimeExceptionSynchronous() throws Exception {
interceptorThrowsRuntimeExceptionSynchronous(false);
}
@Test public void networkInterceptorThrowsRuntimeExceptionSynchronous() throws Exception {
interceptorThrowsRuntimeExceptionSynchronous(true);
}
/**
 * A synchronous caller observes an interceptor's unexpected exception directly: it propagates
 * out of {@code execute()} where the caller can catch and handle it.
 */
private void interceptorThrowsRuntimeExceptionSynchronous(boolean network) throws Exception {
  Interceptor failing = new Interceptor() {
    @Override public Response intercept(Chain chain) throws IOException {
      throw new RuntimeException("boom!");
    }
  };
  addInterceptor(network, failing);

  Request request = new Request.Builder()
      .url(server.url("/"))
      .build();

  try {
    client.newCall(request).execute();
    fail();
  } catch (RuntimeException thrown) {
    assertEquals("boom!", thrown.getMessage());
  }
}
@Test public void networkInterceptorModifiedRequestIsReturned() throws IOException {
  server.enqueue(new MockResponse());

  // Rewrites the User-Agent header at the network layer.
  client = client.newBuilder()
      .addNetworkInterceptor(new Interceptor() {
        @Override public Response intercept(Chain chain) throws IOException {
          Request rewritten = chain.request().newBuilder()
              .header("User-Agent", "intercepted request")
              .build();
          return chain.proceed(rewritten);
        }
      })
      .build();

  Request request = new Request.Builder()
      .url(server.url("/"))
      .header("User-Agent", "user request")
      .build();

  Response response = client.newCall(request).execute();
  // The user-facing response echoes the caller's request; only the wire-level
  // networkResponse reflects the interceptor's rewrite.
  assertNotNull(response.request().header("User-Agent"));
  assertEquals("user request", response.request().header("User-Agent"));
  assertEquals("intercepted request", response.networkResponse().request().header("User-Agent"));
}
// Application and network interceptors share the same failure behavior for asynchronous calls;
// these two tests drive the common helper once with each interceptor type.
@Test public void applicationInterceptorThrowsRuntimeExceptionAsynchronous() throws Exception {
interceptorThrowsRuntimeExceptionAsynchronous(false);
}
@Test public void networkInterceptorThrowsRuntimeExceptionAsynchronous() throws Exception {
interceptorThrowsRuntimeExceptionAsynchronous(true);
}
/**
 * When an interceptor throws an unexpected exception, asynchronous callers are left hanging. The
 * exception goes to the uncaught exception handler.
 */
private void interceptorThrowsRuntimeExceptionAsynchronous(boolean network) throws Exception {
  Interceptor failing = new Interceptor() {
    @Override public Response intercept(Chain chain) throws IOException {
      throw new RuntimeException("boom!");
    }
  };
  addInterceptor(network, failing);

  // Route dispatcher threads through an executor that captures otherwise-uncaught exceptions.
  ExceptionCatchingExecutor executor = new ExceptionCatchingExecutor();
  client = client.newBuilder()
      .dispatcher(new Dispatcher(executor))
      .build();

  Request request = new Request.Builder()
      .url(server.url("/"))
      .build();

  client.newCall(request).enqueue(callback);
  assertEquals("boom!", executor.takeException().getMessage());
}
@Test public void applicationInterceptorReturnsNull() throws Exception {
  server.enqueue(new MockResponse());

  Interceptor interceptor = new Interceptor() {
    @Override public Response intercept(Chain chain) throws IOException {
      chain.proceed(chain.request());
      return null; // Contract violation under test.
    }
  };
  client = client.newBuilder()
      .addInterceptor(interceptor)
      .build();

  ExceptionCatchingExecutor executor = new ExceptionCatchingExecutor();
  client = client.newBuilder()
      .dispatcher(new Dispatcher(executor))
      .build();

  Request request = new Request.Builder()
      .url(server.url("/"))
      .build();

  try {
    client.newCall(request).execute();
    fail();
  } catch (NullPointerException expected) {
    // The failure message names the offending interceptor instance.
    assertEquals("application interceptor " + interceptor + " returned null",
        expected.getMessage());
  }
}
@Test public void networkInterceptorReturnsNull() throws Exception {
  server.enqueue(new MockResponse());

  Interceptor interceptor = new Interceptor() {
    @Override public Response intercept(Chain chain) throws IOException {
      chain.proceed(chain.request());
      return null; // Contract violation under test.
    }
  };
  client = client.newBuilder()
      .addNetworkInterceptor(interceptor)
      .build();

  ExceptionCatchingExecutor executor = new ExceptionCatchingExecutor();
  client = client.newBuilder()
      .dispatcher(new Dispatcher(executor))
      .build();

  Request request = new Request.Builder()
      .url(server.url("/"))
      .build();

  try {
    client.newCall(request).execute();
    fail();
  } catch (NullPointerException expected) {
    // The failure message names the offending interceptor instance.
    assertEquals("network interceptor " + interceptor + " returned null", expected.getMessage());
  }
}
@Test public void networkInterceptorReturnsConnectionOnEmptyBody() throws Exception {
  server.enqueue(new MockResponse()
      .setSocketPolicy(SocketPolicy.DISCONNECT_AT_END)
      .addHeader("Connection", "Close"));

  // A network interceptor runs on an established exchange, so a connection must be
  // visible from the chain even when the response body is empty.
  client = client.newBuilder()
      .addNetworkInterceptor(new Interceptor() {
        @Override public Response intercept(Chain chain) throws IOException {
          Response response = chain.proceed(chain.request());
          assertNotNull(chain.connection());
          return response;
        }
      })
      .build();

  Request request = new Request.Builder()
      .url(server.url("/"))
      .build();

  Response response = client.newCall(request).execute();
  response.body().close();
}
/** Wraps {@code original} so that everything it writes is upper-cased on the way out. */
private RequestBody uppercase(final RequestBody original) {
  return new RequestBody() {
    @Override public MediaType contentType() {
      return original.contentType();
    }

    @Override public long contentLength() throws IOException {
      return original.contentLength();
    }

    @Override public void writeTo(BufferedSink sink) throws IOException {
      // Write through an upper-casing sink, then flush the buffered bytes to the target.
      BufferedSink upperSink = Okio.buffer(uppercase(sink));
      original.writeTo(upperSink);
      upperSink.emit();
    }
  };
}
/** Returns a sink that upper-cases (US locale) all UTF-8 text written to it before forwarding. */
private Sink uppercase(final BufferedSink original) {
return new ForwardingSink(original) {
@Override public void write(Buffer source, long byteCount) throws IOException {
// Drains byteCount bytes from source as UTF-8, upper-cases, and rewrites to the delegate.
original.writeUtf8(source.readUtf8(byteCount).toUpperCase(Locale.US));
}
};
}
/** Returns a response body whose UTF-8 content is upper-cased as it is read. */
static ResponseBody uppercase(ResponseBody original) throws IOException {
return ResponseBody.create(original.contentType(), original.contentLength(),
Okio.buffer(uppercase(original.source())));
}
/** Returns a source that upper-cases (US locale) the UTF-8 text read from {@code original}. */
private static Source uppercase(final Source original) {
  return new ForwardingSource(original) {
    @Override public long read(Buffer sink, long byteCount) throws IOException {
      // Read into a scratch buffer first so the transformed text can be re-emitted.
      Buffer scratch = new Buffer();
      long bytesRead = original.read(scratch, byteCount);
      sink.writeUtf8(scratch.readUtf8().toUpperCase(Locale.US));
      return bytesRead;
    }
  };
}
/** Gzips {@code data} (encoded as UTF-8) into a fresh buffer. */
private Buffer gzip(String data) throws IOException {
  Buffer gzipped = new Buffer();
  BufferedSink gzipSink = Okio.buffer(new GzipSink(gzipped));
  gzipSink.writeUtf8(data);
  gzipSink.close(); // Close flushes the gzip trailer into the buffer.
  return gzipped;
}
/** Installs {@code interceptor} on {@link #client} as a network or application interceptor. */
private void addInterceptor(boolean network, Interceptor interceptor) {
  OkHttpClient.Builder builder = client.newBuilder();
  client = (network
      ? builder.addNetworkInterceptor(interceptor)
      : builder.addInterceptor(interceptor))
      .build();
}
/** Catches exceptions that are otherwise headed for the uncaught exception handler. */
private static class ExceptionCatchingExecutor extends ThreadPoolExecutor {
// Thread-safe hand-off between the worker thread that catches and the test thread that awaits.
private final BlockingQueue<Exception> exceptions = new LinkedBlockingQueue<Exception>();
public ExceptionCatchingExecutor() {
// Single worker thread, zero keep-alive, direct hand-off queue.
super(1, 1, 0, TimeUnit.SECONDS, new SynchronousQueue<Runnable>());
}
@Override public void execute(final Runnable runnable) {
// Wrap each submitted task so any exception it throws is captured for takeException()
// instead of reaching the thread's uncaught-exception handler.
super.execute(new Runnable() {
@Override public void run() {
try {
runnable.run();
} catch (Exception e) {
exceptions.add(e);
}
}
});
}
/** Blocks until a wrapped task has thrown, then returns that exception. */
public Exception takeException() throws InterruptedException {
return exceptions.take();
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.persistence.cassandra;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.UUID;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.thrift.ColumnDef;
import org.apache.cassandra.thrift.IndexType;
import org.apache.commons.lang.StringUtils;
import org.apache.usergrid.utils.JsonUtils;
import com.fasterxml.jackson.databind.JsonNode;
import me.prettyprint.cassandra.service.ThriftColumnDef;
import me.prettyprint.hector.api.ClockResolution;
import me.prettyprint.hector.api.beans.DynamicComposite;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.ddl.ColumnDefinition;
import me.prettyprint.hector.api.ddl.ColumnFamilyDefinition;
import me.prettyprint.hector.api.ddl.ComparatorType;
import me.prettyprint.hector.api.ddl.KeyspaceDefinition;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.mutation.MutationResult;
import me.prettyprint.hector.api.mutation.Mutator;
import static java.nio.ByteBuffer.wrap;
import static me.prettyprint.hector.api.factory.HFactory.createClockResolution;
import static me.prettyprint.hector.api.factory.HFactory.createColumn;
import static org.apache.commons.beanutils.MethodUtils.invokeStaticMethod;
import static org.apache.commons.lang.StringUtils.removeEnd;
import static org.apache.commons.lang.StringUtils.removeStart;
import static org.apache.commons.lang.StringUtils.split;
import static org.apache.commons.lang.StringUtils.substringAfterLast;
import static org.apache.usergrid.persistence.Schema.PROPERTY_TYPE;
import static org.apache.usergrid.persistence.Schema.PROPERTY_UUID;
import static org.apache.usergrid.persistence.Schema.serializeEntityProperty;
import static org.apache.usergrid.persistence.cassandra.Serializers.be;
import static org.apache.usergrid.utils.ClassUtils.isBasicType;
import static org.apache.usergrid.utils.ConversionUtils.bytebuffer;
import static org.apache.usergrid.utils.JsonUtils.toJsonNode;
import static org.apache.usergrid.utils.StringUtils.replaceAll;
import static org.apache.usergrid.utils.StringUtils.stringOrSubstringBeforeFirst;
/** @author edanuff */
public class CassandraPersistenceUtils {

    private static final Logger logger = LoggerFactory.getLogger( CassandraPersistenceUtils.class );

    /** Logger for batch operations */
    private static final Logger batch_logger =
            LoggerFactory.getLogger( CassandraPersistenceUtils.class.getPackage().getName() + ".BATCH" );

    /** Delimiter placed between the segments of a composite string key. */
    public static final char KEY_DELIM = ':';

    /** All-zero sentinel UUID returned by {@link #keyID} for an empty composite key. */
    public static final UUID NULL_ID = new UUID( 0, 0 );


    /**
     * Logs a single batch mutation at DEBUG level on the ".BATCH" logger.
     *
     * @param operation human-readable operation name, e.g. "Insert" or "Delete"
     * @param columnFamily target column family
     * @param key row key
     * @param columnName column name
     * @param columnValue column value, may be null for deletes
     * @param timestamp mutation timestamp (currently not included in the log line)
     */
    public static void logBatchOperation( String operation, Object columnFamily, Object key, Object columnName,
                                          Object columnValue, long timestamp ) {
        if ( batch_logger.isDebugEnabled() ) {
            batch_logger.debug( "{} cf={} key={} name={} value={}",
                    operation, columnFamily, key, columnName, columnValue );
        }
    }


    /**
     * Adds a single column insertion to the mutator. List-valued names/values are serialized as
     * DynamicComposites before byte conversion.
     */
    public static void addInsertToMutator( Mutator<ByteBuffer> m, Object columnFamily, Object key, Object columnName,
                                           Object columnValue, long timestamp ) {
        logBatchOperation( "Insert", columnFamily, key, columnName, columnValue, timestamp );

        if ( columnName instanceof List<?> ) {
            columnName = DynamicComposite.toByteBuffer( ( List<?> ) columnName );
        }
        if ( columnValue instanceof List<?> ) {
            columnValue = DynamicComposite.toByteBuffer( ( List<?> ) columnValue );
        }

        HColumn<ByteBuffer, ByteBuffer> column =
                createColumn( bytebuffer( columnName ), bytebuffer( columnValue ), timestamp, be, be );
        m.addInsertion( bytebuffer( key ), columnFamily.toString(), column );
    }


    /** Adds one insertion per map entry, all stamped with the same timestamp. */
    public static void addInsertToMutator( Mutator<ByteBuffer> m, Object columnFamily, Object key, Map<?, ?> columns,
                                           long timestamp ) throws Exception {
        for ( Entry<?, ?> entry : columns.entrySet() ) {
            addInsertToMutator( m, columnFamily, key, entry.getKey(), entry.getValue(), timestamp );
        }
    }


    /** Adds a single column deletion to the mutator. List-valued names become DynamicComposites. */
    public static void addDeleteToMutator( Mutator<ByteBuffer> m, Object columnFamily, Object key, Object columnName,
                                           long timestamp ) throws Exception {
        logBatchOperation( "Delete", columnFamily, key, columnName, null, timestamp );

        if ( columnName instanceof List<?> ) {
            columnName = DynamicComposite.toByteBuffer( ( List<?> ) columnName );
        }

        m.addDeletion( bytebuffer( key ), columnFamily.toString(), bytebuffer( columnName ), be, timestamp );
    }


    /**
     * Returns a case-insensitive name-to-value map of the given columns.
     * Never returns null; a null input yields an empty map.
     */
    public static Map<String, ByteBuffer> getColumnMap( List<HColumn<String, ByteBuffer>> columns ) {
        Map<String, ByteBuffer> column_map = new TreeMap<String, ByteBuffer>( String.CASE_INSENSITIVE_ORDER );
        if ( columns != null ) {
            for ( HColumn<String, ByteBuffer> column : columns ) {
                String column_name = column.getName();
                column_map.put( column_name, column.getValue() );
            }
        }
        return column_map;
    }


    /**
     * Returns an insertion-ordered name-to-value map of the given columns, or null if
     * {@code columns} is null.
     */
    public static <K, V> Map<K, V> asMap( List<HColumn<K, V>> columns ) {
        if ( columns == null ) {
            return null;
        }
        Map<K, V> column_map = new LinkedHashMap<K, V>();
        for ( HColumn<K, V> column : columns ) {
            K column_name = column.getName();
            column_map.put( column_name, column.getValue() );
        }
        return column_map;
    }


    /**
     * @return a composite key. A lone UUID or ByteBuffer argument is returned unchanged; anything
     *         else is flattened (recursively for lists and arrays) into a ':'-delimited string
     *         with strings lower-cased and nulls rendered as "*".
     */
    public static Object key( Object... objects ) {
        if ( objects.length == 1 ) {
            Object obj = objects[0];
            if ( ( obj instanceof UUID ) || ( obj instanceof ByteBuffer ) ) {
                return obj;
            }
        }
        StringBuilder s = new StringBuilder();
        for ( Object obj : objects ) {
            if ( obj instanceof String ) {
                s.append( ( ( String ) obj ).toLowerCase() );
            }
            else if ( obj instanceof List<?> ) {
                s.append( key( ( ( List<?> ) obj ).toArray() ) );
            }
            else if ( obj instanceof Object[] ) {
                s.append( key( ( Object[] ) obj ) );
            }
            else if ( obj != null ) {
                s.append( obj );
            }
            else {
                s.append( "*" );
            }

            s.append( KEY_DELIM );
        }

        // Fix: guard the trailing-delimiter trim — with zero arguments the previous
        // unconditional deleteCharAt(-1) threw StringIndexOutOfBoundsException instead of
        // returning the empty key that keyID() expects.
        if ( s.length() > 0 ) {
            s.deleteCharAt( s.length() - 1 );
        }

        return s.toString();
    }


    /**
     * @return UUID for composite key — a lone UUID argument is returned as-is; otherwise a
     *         name-based (type 3) UUID derived from the composite key string, or {@link #NULL_ID}
     *         when the key string is empty.
     */
    public static UUID keyID( Object... objects ) {
        if ( objects.length == 1 ) {
            Object obj = objects[0];
            if ( obj instanceof UUID ) {
                return ( UUID ) obj;
            }
        }
        String keyStr = key( objects ).toString();
        if ( keyStr.length() == 0 ) {
            return NULL_ID;
        }
        // NOTE(review): getBytes() uses the platform default charset; switching to an explicit
        // charset would change the UUIDs generated for existing data, so it is left as-is.
        UUID uuid = UUID.nameUUIDFromBytes( keyStr.getBytes() );
        if (logger.isTraceEnabled()) {
            logger.trace("Key {} equals UUID {}", keyStr, uuid);
        }
        return uuid;
    }


    /** Executes the mutator. No longer does retries; {@code retries} is ignored. */
    public static MutationResult batchExecute( Mutator<?> m, int retries ) {
        return m.execute();
    }


    /**
     * Converts an arbitrary object into a value storable in Cassandra: basic types and
     * ByteBuffers pass through; JSON value nodes are unwrapped (numbers/booleans to BigInteger,
     * text to String, binary to ByteBuffer); anything else is returned as its JsonNode tree.
     */
    public static Object toStorableValue( Object obj ) {
        if ( obj == null ) {
            return null;
        }

        if ( isBasicType( obj.getClass() ) ) {
            return obj;
        }

        if ( obj instanceof ByteBuffer ) {
            return obj;
        }

        JsonNode json = toJsonNode( obj );
        if ( ( json != null ) && json.isValueNode() ) {
            if ( json.isBigInteger() ) {
                // Fix: return the full BigInteger value; the previous asInt() silently
                // truncated values outside the int range.
                return json.bigIntegerValue();
            }
            else if ( json.isNumber() || json.isBoolean() ) {
                return BigInteger.valueOf( json.asLong() );
            }
            else if ( json.isTextual() ) {
                return json.asText();
            }
            else if ( json.isBinary() ) {
                try {
                    return wrap( json.binaryValue() );
                }
                catch ( IOException e ) {
                    // Fix: the error was silently swallowed; log it before falling through
                    // to returning the raw JsonNode.
                    logger.error( "Unable to read binary value from JSON node", e );
                }
            }
        }

        return json;
    }


    /**
     * Serializes {@code obj} to a ByteBuffer: JsonNodes (or any non-ByteBuffer value when
     * {@code forceJson} is set) go through JSON serialization; everything else is converted
     * directly to bytes.
     */
    public static ByteBuffer toStorableBinaryValue( Object obj, boolean forceJson ) {
        obj = toStorableValue( obj );
        if ( ( obj instanceof JsonNode ) || ( forceJson && ( obj != null ) && !( obj instanceof ByteBuffer ) ) ) {
            return JsonUtils.toByteBuffer( obj );
        }
        else {
            return bytebuffer( obj );
        }
    }


    /**
     * Parses a comma-separated "name:comparer" index specification into Hector column
     * definitions with KEYS indexes. A missing comparer defaults to "UUIDType".
     *
     * @param indexes specification string, may be null
     * @return parsed definitions, or null when {@code indexes} is null
     */
    public static List<ColumnDefinition> getIndexMetadata( String indexes ) {
        if ( indexes == null ) {
            return null;
        }
        String[] index_entries = split( indexes, ',' );
        List<ColumnDef> columns = new ArrayList<ColumnDef>();
        for ( String index_entry : index_entries ) {
            String column_name = stringOrSubstringBeforeFirst( index_entry, ':' ).trim();
            String comparer = substringAfterLast( index_entry, ":" ).trim();
            if ( StringUtils.isBlank( comparer ) ) {
                comparer = "UUIDType";
            }
            if ( StringUtils.isNotBlank( column_name ) ) {
                ColumnDef cd = new ColumnDef( bytebuffer( column_name ), comparer );
                cd.setIndex_name( column_name );
                cd.setIndex_type( IndexType.KEYS );
                columns.add( cd );
            }
        }
        return ThriftColumnDef.fromThriftList( columns );
    }


    /** Builds a fresh list of column family definitions for the given enum in {@code keyspace}. */
    public static List<ColumnFamilyDefinition> getCfDefs( Class<? extends CFEnum> cfEnum, String keyspace ) {
        return getCfDefs( cfEnum, null, keyspace );
    }


    /**
     * Appends a column family definition to {@code cf_defs} for each CFEnum constant whose
     * {@code create()} flag is set.
     *
     * @param cfEnum enum class whose static values() is invoked reflectively
     * @param cf_defs list to append to; a new list is created when null
     * @param keyspace target keyspace name
     * @return the populated list, or null if the enum values could not be obtained
     */
    public static List<ColumnFamilyDefinition> getCfDefs( Class<? extends CFEnum> cfEnum,
                                                          List<ColumnFamilyDefinition> cf_defs, String keyspace ) {

        if ( cf_defs == null ) {
            cf_defs = new ArrayList<ColumnFamilyDefinition>();
        }

        CFEnum[] values = null;
        try {
            values = ( CFEnum[] ) invokeStaticMethod( cfEnum, "values", null);
        }
        catch ( Exception e ) {
            logger.error( "Couldn't get CFEnum values", e );
        }
        if ( values == null ) {
            return null;
        }

        for ( CFEnum cf : values ) {
            if ( !cf.create() ) {
                continue;
            }
            String defaultValidationClass = cf.getValidator();
            List<ColumnDefinition> metadata = cf.getMetadata();

            ColumnFamilyDefinition cf_def = HFactory.createColumnFamilyDefinition( keyspace, cf.getColumnFamily(),
                    ComparatorType.getByClassName( cf.getComparator() ), metadata );

            if ( defaultValidationClass != null ) {
                cf_def.setDefaultValidationClass( defaultValidationClass );
            }

            cf_defs.add( cf_def );
        }

        return cf_defs;
    }
}
| |
/**
* Copyright 2005-2015 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.core.framework.config.property;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.kuali.rice.core.api.config.property.Config;
import org.kuali.rice.core.api.config.property.ConfigContext;
import org.kuali.rice.core.api.util.ClasspathOrFileResourceLoader;
import org.kuali.rice.core.api.util.Truth;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
 * This abstract class implements all the convenience config methods that
 * can be implemented independent of the config impl.
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 *
 */
public abstract class AbstractBaseConfig implements org.kuali.rice.core.api.config.property.Config {

    private static final Logger LOG = Logger.getLogger(AbstractBaseConfig.class);

    public abstract Object getObject(String key);

    public abstract Map<String, Object> getObjects();

    public abstract Properties getProperties();

    public abstract String getProperty(String key);

    public abstract void parseConfig() throws IOException;

    /**
     * Returns all properties whose key starts with {@code prefix}, optionally with the prefix
     * stripped from the returned keys. Keys are trimmed before the prefix check.
     */
    public Map<String, String> getPropertiesWithPrefix(String prefix, boolean stripPrefix) {
        Map<String, String> props = new HashMap<String, String>();
        for (Map.Entry<Object, Object> entry : getProperties().entrySet()) {
            String key = (String) entry.getKey();
            if (StringUtils.isNotBlank(key) && key.trim().startsWith(prefix)) {
                props.put(stripPrefix ? key.substring(prefix.length()) : key, (String) entry.getValue());
            }
        }
        return props;
    }

    public String getAlternateOJBFile() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.ALT_OJB_FILE);
    }

    public String getAlternateSpringFile() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.ALT_SPRING_FILE);
    }

    public String getBaseWebServiceURL() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.BASE_WEB_SERVICE_URL_WORKFLOW_CLIENT_FILE);
    }

    public String getBaseWebServiceWsdlPath() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.BASE_WEB_SERVICE_WSDL_PATH);
    }

    public Boolean getBatchMode() {
        // Boolean.valueOf replaces the deprecated Boolean(String) constructor; same parsing.
        return Boolean.valueOf(getProperty(org.kuali.rice.core.api.config.property.Config.BATCH_MODE));
    }

    @Override
    public boolean getBooleanProperty(String key, boolean defaultValue) {
        return Truth.strToBooleanIgnoreCase(getProperty(key), defaultValue).booleanValue();
    }

    @Override
    public Boolean getBooleanProperty(String key) {
        return Truth.strToBooleanIgnoreCase(getProperty(key));
    }

    @Override
    public long getNumericProperty(String key, long defaultValue) {
        Long propertyValue = getNumericProperty(key);
        if (propertyValue == null) {
            return defaultValue;
        } else {
            return propertyValue.longValue();
        }
    }

    @Override
    public Long getNumericProperty(String key) {
        String propertyValue = getProperty(key);
        if (StringUtils.isBlank(propertyValue)) {
            return null;
        }
        // Long.valueOf replaces the deprecated Long(String) constructor; still throws
        // NumberFormatException on unparseable values, as before.
        return Long.valueOf(propertyValue);
    }

    public String getClientWSDLFullPathAndFileName() {
        return getProperty(Config.WSDL_LOCATION_WORKFLOW_CLIENT_FILE);
    }

    public String getDailyEmailFirstDeliveryDate() {
        return getProperty(Config.FIRST_DAILY_EMAIL_DELIVERY_DATE);
    }

    public String getDefaultKewNoteClass() {
        return getProperty(Config.DEFAULT_KEW_NOTE_CLASS);
    }

    public Boolean getDevMode() {
        return Boolean.valueOf(getProperty(Config.DEV_MODE));
    }

    public String getDocumentLockTimeout() {
        return getProperty(Config.DOCUMENT_LOCK_TIMEOUT);
    }

    public String getEDLConfigLocation() {
        return getProperty(Config.EDL_CONFIG_LOCATION);
    }

    public String getEmailConfigurationPath() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.EMAIL_SECURITY_PATH);
    }

    public Boolean getEmailReminderLifecycleEnabled() {
        return Boolean.valueOf(getProperty(org.kuali.rice.core.api.config.property.Config.ENABLE_EMAIL_REMINDER_LIFECYCLE));
    }

    public String getEmbeddedPluginLocation() {
        // Constant name carries a historical typo ("LOCATIAON"); it is part of the Config API.
        return getProperty(Config.EMBEDDED_PLUGIN_LOCATIAON);
    }

    public String getEndPointUrl() {
        // TODO why was this using ConfigContext.getCurrentConfig in the rice BaseConfig
        return getProperty(org.kuali.rice.core.api.config.property.Config.SERVICE_SERVLET_URL);
    }

    public String getEnvironment() {
        return getProperty(Config.ENVIRONMENT);
    }

    public String getProductionEnvironmentCode() {
        return getProperty(Config.PROD_ENVIRONMENT_CODE);
    }

    public boolean isProductionEnvironment() {
        String env = getEnvironment();
        String prod = getProductionEnvironmentCode();
        // test whether the current env is production. assuming undefined env or prod code
        // is a configuration error ensure that it returns false
        return env != null && prod != null && StringUtils.equalsIgnoreCase(env, prod);
    }

    /**
     * @return the {@code Config.RICE_VERSION} property
     */
    public String getRiceVersion() {
        return getProperty(Config.RICE_VERSION);
    }

    /**
     * @return {@code APPLICATION_NAME} or, as a fallback, the {@code MODULE_NAME} property
     */
    public String getApplicationName() {
        // first try APPLICATION_NAME
        String val = getProperty(APPLICATION_NAME);
        if (val == null) {
            val = getProperty(MODULE_NAME);
        }
        return val;
    }

    /**
     * @return {@code APPLICATION_VERSION} or, as a fallback, the {@code VERSION} property
     */
    public String getApplicationVersion() {
        // first try APPLICATION_VERSION
        String val = getProperty(APPLICATION_VERSION);
        if (val == null) {
            val = getProperty(VERSION);
        }
        return val;
    }

    public String getKENBaseURL() {
        return getProperty(Config.KEN_URL);
    }

    public String getKEWBaseURL() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.KEW_URL);
    }

    public String getKIMBaseURL() {
        return getProperty(Config.KIM_URL);
    }

    public String getKRBaseURL() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.KR_URL);
    }

    public String getKeystoreAlias() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.KEYSTORE_ALIAS);
    }

    public String getKeystoreFile() throws IOException {
        String keyStoreFile = getProperty(org.kuali.rice.core.api.config.property.Config.KEYSTORE_FILE);
        return new ClasspathOrFileResourceLoader().getResource(keyStoreFile).getURL().getPath();
    }

    public String getKeystorePassword() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.KEYSTORE_PASSWORD);
    }

    public String getLog4jFileLocation() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.LOG4J_SETTINGS_PATH);
    }

    public String getLog4jReloadInterval() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.LOG4J_SETTINGS_RELOADINTERVAL_MINS);
    }

    public Boolean getOutBoxOn() {
        // subtle difference with BaseConfig - BaseConfig returned true on missing property
        return Boolean.valueOf(getProperty(org.kuali.rice.core.api.config.property.Config.OUT_BOX_MODE));
    }

    public Integer getRefreshRate() {
        // TODO can this be moved to default config file
        // TODO why going to currentContextConfig
        Integer refreshRate;
        try {
            // Integer.valueOf replaces the deprecated Integer(String) constructor; a missing or
            // unparseable property still raises NumberFormatException, handled below.
            refreshRate = Integer.valueOf(ConfigContext.getCurrentContextConfig().getProperty(Config.REFRESH_RATE));
        } catch (NumberFormatException nfe) {
            // Include the cause so misconfiguration is diagnosable from the log.
            LOG.error("Couldn't parse property " + org.kuali.rice.core.api.config.property.Config.REFRESH_RATE + " to set bus refresh rate. Defaulting to 60 seconds.", nfe);
            ConfigContext.getCurrentContextConfig().putProperty(org.kuali.rice.core.api.config.property.Config.REFRESH_RATE, "60");
            return 60;
        }
        return refreshRate;
    }

    public String getTransactionTimeout() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.TRANSACTION_TIMEOUT);
    }

    public String getWebServicesConnectRetry() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.WEB_SERVICE_CONNECT_RETRY);
    }

    public String getWeeklyEmailFirstDeliveryDate() {
        return getProperty(org.kuali.rice.core.api.config.property.Config.FIRST_WEEKLY_EMAIL_DELIVERY_DATE);
    }

    public Boolean getXmlPipelineLifeCycleEnabled() {
        return Boolean.valueOf(getProperty(Config.ENABLE_XML_PIPELINE_LIFECYCLE));
    }

    public Boolean getExternalActnListNotificationLifeCycleEnabled() {
        return Boolean.valueOf(getProperty(Config.ENABLE_EXTERNAL_ACTN_LIST_NOTIFICATION_LIFECYCLE));
    }
}
| |
package io.github.randyp.jdbj.db.derby_10_11;
import io.github.randyp.jdbj.ExecuteUpdate;
import io.github.randyp.jdbj.JDBJ;
import io.github.randyp.jdbj.NamedParameterStatement;
import io.github.randyp.jdbj.SimpleBuilder;
import io.github.randyp.jdbj.test.binding.value.*;
import org.junit.After;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import javax.sql.DataSource;
import java.io.Reader;
import java.net.URL;
import java.sql.*;
import java.util.GregorianCalendar;
@RunWith(Enclosed.class)
public class ValueBindingBuilderTest {
// Shared Derby database for all enclosed test classes. Creates the binding tables before the
// suite and drops them (best-effort) after.
@ClassRule
public static final DerbyRule db = new DerbyRule() {
@Override
protected void before() throws Throwable {
super.before();
// Drop leftovers from a previous aborted run before creating fresh tables.
cleanup();
try (Connection connection = getConnection()) {
try (PreparedStatement ps = connection.prepareStatement("CREATE TABLE binding(bound VARCHAR(500))")) {
ps.execute();
}
try (PreparedStatement ps = connection.prepareStatement("CREATE TABLE binding_blob(bound BLOB)")) {
ps.execute();
}
}
}
@Override
protected void after() {
// Drop tables before the superclass tears the database down.
cleanup();
super.after();
}
// Best-effort drop of both tables; every SQLException is ignored because the tables may
// legitimately not exist yet.
private void cleanup() {
try {
try (Connection connection = getConnection()) {
try (PreparedStatement ps = connection.prepareStatement("DROP TABLE binding_blob")) {
ps.execute();
} catch (SQLException e) {
//ignore
}
try (PreparedStatement ps = connection.prepareStatement("DROP TABLE binding")) {
ps.execute();
} catch (SQLException e) {
//ignore
}
}
} catch (SQLException e) {
//ignore
}
}
};
// Query used by the binding builders under test; the double reference to :binding exercises
// repeated named parameters.
public static final NamedParameterStatement statement = NamedParameterStatement.make("SELECT bound FROM binding WHERE :binding <> 'A' OR :binding IS NULL");
// Insert used by the individual bind tests to write the value being round-tripped.
public static ExecuteUpdate update = JDBJ.update("INSERT INTO binding(bound) VALUES(:binding)");
// Empties the binding table between tests so each bind case starts from a clean slate.
public static void clearBindings() throws Exception {
try (Connection connection = db.getConnection();
PreparedStatement ps = connection.prepareStatement("DELETE FROM binding")) {
ps.execute();
}
}
// Array binding tests. The expected SQLFeatureNotSupportedException annotations indicate the
// Derby 10.11 driver is expected to reject array creation/binding — TODO confirm against the
// driver's documented JDBC feature support.
public static class BindArray extends BindArrayTest {
@After
public void tearDown() throws Exception {
clearBindings();
}
@Override
public DataSource db() {
return db;
}
@Test(expected = SQLFeatureNotSupportedException.class)
@Override
public void value() throws Exception {
try(Connection connection = db.getConnection()){
// createArrayOf is where the driver is expected to throw.
final Array array = connection.createArrayOf("varchar", expected);
update.bindArray(":binding", array).execute(connection);
}
super.value();
}
@Test(expected = SQLFeatureNotSupportedException.class)
@Override
public void Null() throws Exception {
update.bindArray(":binding", null).execute(db);
super.Null();
}
@Override
public SimpleBuilder builder() {
return new SimpleBuilder(statement);
}
}
// ASCII stream binding tests: each case inserts via the shared update statement, then delegates
// to the superclass to verify the round-tripped value.
public static class BindAsciiStream extends BindAsciiStreamTest {
@After
public void tearDown() throws Exception {
clearBindings();
}
@Override
public DataSource db() {
return db;
}
@Test(expected = SQLException.class) //appears to be bug in driver
@Override
public void stream() throws Exception {
// Unbounded-length stream variant; length-specified variants below succeed.
update.bindAsciiStream(":binding", expectedStream()).execute(db);
super.stream();
}
@Test
@Override
public void streamNull() throws Exception {
update.bindAsciiStream(":binding", null).execute(db);
super.streamNull();
}
@Test
@Override
public void streamLength() throws Exception {
update.bindAsciiStream(":binding", expectedStream(), expected.length()).execute(db);
super.streamLength();
}
@Test
@Override
public void streamNullLength() throws Exception {
update.bindAsciiStream(":binding", null, expected.length()).execute(db);
super.streamNullLength();
}
@Test
@Override
public void streamLengthLong() throws Exception {
update.bindAsciiStream(":binding", expectedStream(), (long) expected.length()).execute(db);
super.streamLengthLong();
}
@Test
@Override
public void streamNullLengthLong() throws Exception {
update.bindAsciiStream(":binding", null, (long) expected.length()).execute(db);
super.streamNullLengthLong();
}
@Override
public SimpleBuilder builder() {
return new SimpleBuilder(statement);
}
}
// BigDecimal binding tests: insert via the shared update statement, then delegate to the
// superclass to verify the round-tripped value.
public static class BindBigDecimal extends BindBigDecimalTest {
@After
public void tearDown() throws Exception {
clearBindings();
}
@Override
public DataSource db() {
return db;
}
@Test
@Override
public void value() throws Exception {
update.bindBigDecimal(":binding", expected).execute(db);
super.value();
}
@Test
@Override
public void Null() throws Exception {
update.bindBigDecimal(":binding", null).execute(db);
super.Null();
}
@Override
public SimpleBuilder builder() {
return new SimpleBuilder(statement);
}
}
/*
public static class BindBinaryStream extends BindBinaryStreamTest {
@After
public void tearDown() throws Exception {
clearBindings();
}
@Override
public DataSource db() {
return db;
}
@Test
@Override
public void inputStream() throws Exception {
updateBinary.bindBinaryStream(":binding", expectedStream()).execute(db);
super.inputStream();
}
@Test
@Override
public void inputStreamNull() throws Exception {
updateBinary.bindBinaryStream(":binding", null).execute(db);
super.inputStreamNull();
}
@Test
@Override
public void inputLengthStream() throws Exception {
updateBinary.bindBinaryStream(":binding", expectedStream(), expected.length).execute(db);
super.inputLengthStream();
}
@Test
@Override
public void inputLengthStreamNull() throws Exception {
updateBinary.bindBinaryStream(":binding", null, expected.length).execute(db);
super.inputLengthStreamNull();
}
@Test
@Override
public void inputLengthLongStream() throws Exception {
updateBinary.bindBinaryStream(":binding", expectedStream(), (long) expected.length).execute(db);
super.inputLengthLongStream();
}
@Test
@Override
public void inputLengthLongStreamNull() throws Exception {
updateBinary.bindBinaryStream(":binding", null, (long) expected.length).execute(db);
super.inputLengthLongStreamNull();
}
@Override
public SimpleBuilder builder() {
return new SimpleBuilder(statement_binary);
}
}
*/
/*
public static class BindBlob extends BindBlobTest {
@After
public void tearDown() throws Exception {
clearBindings();
}
@Rule
public ExpectedException thrown = ExpectedException.none();
@Override
public DataSource db() {
return db;
}
@Test
@Override
public void value() throws Exception {
try(Connection connection = db.getConnection()){
final Blob blob = connection.createBlob();
blob.setBytes(1, expected);
update_blob.bindBlob(":binding", blob).execute(connection);
}
super.value();
}
@Test
@Override
public void valueNull() throws Exception {
update_blob.bindBlob(":binding", (Blob) null).execute(db);
super.valueNull();
}
@Test
@Override
public void inputStream() throws Exception {
update_blob.bindBlob(":binding", expectedStream()).execute(db);
super.inputStream();
}
@Test
@Override
public void inputStreamNull() throws Exception {
update_blob.bindBlob(":binding", (InputStream) null).execute(db);
super.inputStreamNull();
}
@Test
@Override
public void inputStreamLength() throws Exception {
update_blob.bindBlob(":binding", expectedStream(), expected.length).execute(db);
super.inputStreamLength();
}
@Test
@Override
public void inputStreamNullLength() throws Exception {
update_blob.bindBlob(":binding", null, expected.length).execute(db);
super.inputStreamNullLength();
}
@Override
public SimpleBuilder builder() {
return new SimpleBuilder(statement_blob);
}
}
*/
/**
 * Runs the shared Boolean binding tests against the embedded test database.
 *
 * <p>Fix: {@code primitiveTrue()} previously called {@code bindBoolean(...)} instead of
 * {@code bindBooleanPrimitive(...)}, so the primitive-true code path was never exercised;
 * it now mirrors {@code primitiveFalse()}.
 */
public static class BindBoolean extends BindBooleanTest {
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Override
    public DataSource db() {
        return db;
    }
    @Test
    @Override
    public void False() throws Exception {
        update.bindBoolean(":binding", Boolean.FALSE).execute(db);
        super.False();
    }
    @Test
    @Override
    public void True() throws Exception {
        update.bindBoolean(":binding", Boolean.TRUE).execute(db);
        super.True();
    }
    @Test
    @Override
    public void Null() throws Exception {
        update.bindBoolean(":binding", null).execute(db);
        super.Null();
    }
    @Test
    @Override
    public void primitiveFalse() throws Exception {
        update.bindBooleanPrimitive(":binding", false).execute(db);
        super.primitiveFalse();
    }
    @Test
    @Override
    public void primitiveTrue() throws Exception {
        // Use the primitive overload so this test actually covers bindBooleanPrimitive(true).
        update.bindBooleanPrimitive(":binding", true).execute(db);
        super.primitiveTrue();
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
}
/** Runs the shared Byte binding tests (boxed, null and primitive) against the test database. */
public static class BindByte extends BindByteTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void value() throws Exception {
        update.bindByte(":binding", expected).execute(db());
        super.value();
    }
    @Test
    @Override
    public void Null() throws Exception {
        update.bindByte(":binding", null).execute(db());
        super.Null();
    }
    @Test
    @Override
    public void primitive() throws Exception {
        update.bindBytePrimitive(":binding", expected).execute(db());
        super.primitive();
    }
}
// public static class BindByteArray extends BindByteArrayTest {
// @Override
// public DataSource db() {
// return db;
// }
// }
//
// public static class BindCharacterStream extends BindCharacterStreamTest {
// @Override
// public DataSource db() {
// return db;
// }
// }
//
// public static class BindClob extends BindClobTest {
// @Override
// public DataSource db() {
// return db;
// }
// }
/** Runs the shared Date binding tests, with and without an explicit Calendar. */
public static class BindDate extends BindDateTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void value() throws Exception {
        update.bindDate(":binding", expectedDate).execute(db());
        super.value();
    }
    @Test
    @Override
    public void valueNull() throws Exception {
        update.bindDate(":binding", null).execute(db());
        super.valueNull();
    }
    @Test
    @Override
    public void valueCalendar() throws Exception {
        update.bindDate(":binding", expectedDate, GregorianCalendar.getInstance()).execute(db());
        super.valueCalendar();
    }
    @Test
    @Override
    public void valueCalendarNull() throws Exception {
        update.bindDate(":binding", null, GregorianCalendar.getInstance()).execute(db());
        super.valueCalendarNull();
    }
}
/** Runs the shared Double binding tests (boxed, null and primitive). */
public static class BindDouble extends BindDoubleTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void value() throws Exception {
        update.bindDouble(":binding", expected).execute(db());
        super.value();
    }
    @Test
    @Override
    public void Null() throws Exception {
        update.bindDouble(":binding", null).execute(db());
        super.Null();
    }
    @Test
    @Override
    public void primitive() throws Exception {
        update.bindDoublePrimitive(":binding", expected).execute(db());
        super.primitive();
    }
}
/** Runs the shared Float binding tests (boxed, null and primitive). */
public static class BindFloat extends BindFloatTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void value() throws Exception {
        update.bindFloat(":binding", expected).execute(db());
        super.value();
    }
    @Test
    @Override
    public void Null() throws Exception {
        update.bindFloat(":binding", null).execute(db());
        super.Null();
    }
    @Test
    @Override
    public void primitive() throws Exception {
        update.bindFloatPrimitive(":binding", expected).execute(db());
        super.primitive();
    }
}
/** Runs the shared Integer binding tests, including the bindInt alias. */
public static class BindInteger extends BindIntegerTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void value() throws Exception {
        update.bindInteger(":binding", expected).execute(db());
        super.value();
    }
    @Test
    @Override
    public void Null() throws Exception {
        update.bindInteger(":binding", null).execute(db());
        super.Null();
    }
    @Test
    @Override
    public void primitive() throws Exception {
        update.bindIntegerPrimitive(":binding", expected).execute(db());
        super.primitive();
    }
    @Test
    @Override
    public void primitiveAlias() throws Exception {
        update.bindInt(":binding", expected).execute(db());
        super.primitiveAlias();
    }
}
/** Runs the shared Long binding tests (boxed, null and primitive). */
public static class BindLong extends BindLongTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void value() throws Exception {
        update.bindLong(":binding", expected).execute(db());
        super.value();
    }
    @Test
    @Override
    public void Null() throws Exception {
        update.bindLong(":binding", null).execute(db());
        super.Null();
    }
    @Test
    @Override
    public void primitive() throws Exception {
        update.bindLongPrimitive(":binding", expected).execute(db());
        super.primitive();
    }
}
/**
 * Runs the shared NCharacterStream binding tests. The driver under test does not
 * support national character streams, so every case expects SQLFeatureNotSupportedException.
 */
public static class BindNCharacterStream extends BindNCharacterStreamTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void reader() throws Exception {
        update.bindNCharacterStream(":binding", expectedReader()).execute(db());
        super.reader();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void readerNull() throws Exception {
        update.bindNCharacterStream(":binding", null).execute(db());
        super.readerNull();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void readerLength() throws Exception {
        update.bindNCharacterStream(":binding", expectedReader(), expected.length()).execute(db());
        super.readerLength();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void readerNullLength() throws Exception {
        update.bindNCharacterStream(":binding", null, expected.length()).execute(db());
        super.readerNullLength();
    }
}
/**
 * Runs the shared NClob binding tests. NClobs are unsupported by the driver under
 * test, so every case expects SQLFeatureNotSupportedException.
 */
public static class BindNClob extends BindNClobTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void value() throws Exception {
        // The NClob must be created and used on the same connection.
        try (Connection connection = db().getConnection()) {
            final NClob nClob = connection.createNClob();
            nClob.setString(1, expected);
            update.bindNClob(":binding", nClob).execute(connection);
        }
        super.value();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void valueNull() throws Exception {
        update.bindNClob(":binding", (NClob) null).execute(db());
        super.valueNull();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void reader() throws Exception {
        update.bindNClob(":binding", expectedReader()).execute(db());
        super.reader();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void readerNull() throws Exception {
        update.bindNClob(":binding", (Reader) null).execute(db());
        super.readerNull();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void readerLength() throws Exception {
        update.bindNClob(":binding", expectedReader(), expected.length()).execute(db());
        super.readerLength();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void readerNullLength() throws Exception {
        update.bindNClob(":binding", null, expected.length()).execute(db());
        super.readerNullLength();
    }
}
/** Runs the shared NString binding tests; the driver rejects national strings. */
public static class BindNString extends BindNStringTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void value() throws Exception {
        update.bindNString(":binding", expected).execute(db());
        super.value();
    }
    @Test(expected = SQLFeatureNotSupportedException.class)
    @Override
    public void Null() throws Exception {
        update.bindNString(":binding", null).execute(db());
        super.Null();
    }
}
/** Runs the shared NULL binding tests, by SQL type and by type plus type name. */
public static class BindNull extends BindNullTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void type() throws Exception {
        update.bindNull(":binding", Types.VARCHAR).execute(db());
        super.type();
    }
    @Test
    @Override
    public void typeAndName() throws Exception {
        update.bindNull(":binding", Types.VARCHAR, "varchar").execute(db());
        super.typeAndName();
    }
}
/**
 * Runs the shared Object binding tests: plain value, int type code, type code plus
 * scale/length, and the JDBCType-based overloads, each with a null counterpart.
 */
public static class BindObject extends BindObjectTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void value() throws Exception {
        update.bindObject(":binding", expected).execute(db());
        super.value();
    }
    @Test
    @Override
    public void valueNull() throws Exception {
        update.bindObject(":binding", null).execute(db());
        super.valueNull();
    }
    @Test
    @Override
    public void valueType() throws Exception {
        update.bindObject(":binding", expected, Types.VARCHAR).execute(db());
        super.valueType();
    }
    @Test
    @Override
    public void valueNullType() throws Exception {
        update.bindObject(":binding", null, Types.VARCHAR).execute(db());
        super.valueNullType();
    }
    @Test
    @Override
    public void valueTypeLength() throws Exception {
        update.bindObject(":binding", expected, Types.VARCHAR, 5).execute(db());
        super.valueTypeLength();
    }
    @Test
    @Override
    public void valueNullTypeLength() throws Exception {
        update.bindObject(":binding", null, Types.VARCHAR, 5).execute(db());
        super.valueNullTypeLength();
    }
    @Test
    @Override
    public void valueSQLType() throws Exception {
        update.bindObject(":binding", expected, JDBCType.VARCHAR).execute(db());
        super.valueSQLType();
    }
    @Test
    @Override
    public void valueNullSQLType() throws Exception {
        update.bindObject(":binding", null, JDBCType.VARCHAR).execute(db());
        super.valueNullSQLType();
    }
    @Test
    @Override
    public void valueSQLTypeLength() throws Exception {
        update.bindObject(":binding", expected, JDBCType.VARCHAR, 5).execute(db());
        super.valueSQLTypeLength();
    }
    @Test
    @Override
    public void valueNullSQLTypeLength() throws Exception {
        update.bindObject(":binding", null, JDBCType.VARCHAR, 5).execute(db());
        super.valueNullSQLTypeLength();
    }
}
/** Runs the shared Short binding tests (boxed, null and primitive). */
public static class BindShort extends BindShortTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void value() throws Exception {
        update.bindShort(":binding", expected).execute(db());
        super.value();
    }
    @Test
    @Override
    public void Null() throws Exception {
        update.bindShort(":binding", null).execute(db());
        super.Null();
    }
    @Test
    @Override
    public void primitive() throws Exception {
        update.bindShortPrimitive(":binding", expected).execute(db());
        super.primitive();
    }
}
/**
 * Runs the shared SQLXML binding tests; the driver rejects SQLXML, so both
 * cases expect SQLFeatureNotSupportedException.
 *
 * <p>Fixes: (1) value() created the SQLXML on {@code connection} but executed the
 * update against {@code db}, i.e. a different connection — per JDBC, a SQLXML
 * object is only valid within the connection that created it, so execute on the
 * originating connection (mirrors BindNClob.value()). (2) This fixture was the
 * only one missing the @After clearBindings() cleanup its siblings have.
 */
public static class BindSQLXML extends BindSQLXMLTest {
    // Match the sibling fixtures: leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    @Test(expected = SQLFeatureNotSupportedException.class)
    public void value() throws Exception {
        try (Connection connection = db().getConnection()) {
            final SQLXML sqlxml = connection.createSQLXML();
            sqlxml.setString(expected);
            // Execute on the connection that created the SQLXML, not on db.
            update.bindSQLXML(":binding", sqlxml).execute(connection);
        }
        super.value();
    }
    @Override
    @Test(expected = SQLFeatureNotSupportedException.class)
    public void Null() throws Exception {
        update.bindSQLXML(":binding", null).execute(db);
        super.Null();
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
}
/** Runs the shared String binding tests (value and null). */
public static class BindString extends BindStringTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void value() throws Exception {
        update.bindString(":binding", expected).execute(db());
        super.value();
    }
    @Test
    @Override
    public void valueNull() throws Exception {
        update.bindString(":binding", null).execute(db());
        super.valueNull();
    }
}
/** Runs the shared Time binding tests, with and without an explicit Calendar. */
public static class BindTime extends BindTimeTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void value() throws Exception {
        update.bindTime(":binding", expectedTime).execute(db());
        super.value();
    }
    @Test
    @Override
    public void valueNull() throws Exception {
        update.bindTime(":binding", null).execute(db());
        super.valueNull();
    }
    @Test
    @Override
    public void valueCalendar() throws Exception {
        update.bindTime(":binding", expectedTime, GregorianCalendar.getInstance()).execute(db());
        super.valueCalendar();
    }
    @Test
    @Override
    public void valueCalendarNull() throws Exception {
        update.bindTime(":binding", null, GregorianCalendar.getInstance()).execute(db());
        super.valueCalendarNull();
    }
}
/** Runs the shared Timestamp binding tests, with and without an explicit Calendar. */
public static class BindTimestamp extends BindTimestampTest {
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Test
    @Override
    public void value() throws Exception {
        update.bindTimestamp(":binding", expectedTimestamp).execute(db());
        super.value();
    }
    @Test
    @Override
    public void valueNull() throws Exception {
        update.bindTimestamp(":binding", null).execute(db());
        super.valueNull();
    }
    @Test
    @Override
    public void valueCalendar() throws Exception {
        update.bindTimestamp(":binding", new Timestamp(expectedTime), GregorianCalendar.getInstance()).execute(db());
        super.valueCalendar();
    }
    @Test
    @Override
    public void valueCalendarNull() throws Exception {
        update.bindTimestamp(":binding", null, GregorianCalendar.getInstance()).execute(db());
        super.valueCalendarNull();
    }
}
/** Runs the shared URL binding tests; the driver rejects URL values. */
public static class BindURL extends BindURLTest {
    @Rule
    public ExpectedException thrown = ExpectedException.none();
    @Override
    public DataSource db() {
        return db;
    }
    @Override
    public SimpleBuilder builder() {
        return new SimpleBuilder(statement);
    }
    // Leave no bindings behind for the next test.
    @After
    public void tearDown() throws Exception {
        clearBindings();
    }
    @Override
    @Test(expected = SQLFeatureNotSupportedException.class)
    public void value() throws Exception {
        update.bindURL(":binding", new URL("http", "google.com", 8080, "/")).execute(db());
        super.value();
    }
    @Override
    @Test(expected = SQLFeatureNotSupportedException.class)
    public void Null() throws Exception {
        update.bindURL(":binding", null).execute(db());
        super.Null();
    }
}
}
| |
package com.freewaycoffee.client;
import java.io.IOException;
import java.io.StringReader;
import java.io.UnsupportedEncodingException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.SeekBar.OnSeekBarChangeListener;
public class FreewayCoffeeTimeToLocationActivity extends Activity
{
private FreewayCoffeeApp appState;
private SeekBar TimeSeeker;
private TextView TimeToLocationText;
private Integer CurrentTimeToLocation;
private Integer OriginalTimeToLocation;
private ProgressDialog TimeToLocProgress;
private FreewayCoffeeTimeToLocationXMLHandler TimeToLocationXMLHandler;
private FreewayCoffeeTimeToLocationAsyncGet AsyncGet;
@Override
public void onCreate(Bundle savedInstanceState)
{
    super.onCreate(savedInstanceState);
    appState = ((FreewayCoffeeApp)getApplicationContext());
    setContentView(R.layout.fc_time_to_location);
    // If we were recreated after a configuration change, the previous instance
    // handed us its in-flight AsyncTask via onRetainNonConfigurationInstance().
    Object retained = getLastNonConfigurationInstance();
    if(retained instanceof FreewayCoffeeTimeToLocationAsyncGet)
    {
        AsyncGet=(FreewayCoffeeTimeToLocationAsyncGet)retained;
        // Restore the slider values stashed on the task by the dying instance.
        CurrentTimeToLocation = AsyncGet.CurrentTimeToLocationSaved;
        OriginalTimeToLocation = AsyncGet.OriginalTimeToLocationSaved;
        showProgressDialog();// This must be before setActivity as that calls ProcessXML which destroys the dialog --- so we dont want to just re-create it again
        AsyncGet.SetActivity(this);
    }
    else
    {
        // Fresh start: no pending request; seed both values from the launching Intent.
        AsyncGet =null;
        CurrentTimeToLocation = getIntent().getIntExtra(FreewayCoffeeItemListView.INTENT_KEY_TIME_TO_LOCATION,0);
        OriginalTimeToLocation = getIntent().getIntExtra(FreewayCoffeeItemListView.INTENT_KEY_TIME_TO_LOCATION,0);
    }
    // Banner: "<user name>, <set-time-to prompt>".
    TextView UserBanner = (TextView)findViewById(R.id.fc_banner_text);
    UserBanner.setText(appState.GetUserInfoData().get(FreewayCoffeeItemListView.USER_INFO_NAME_KEY) +", " + getString(R.string.fc_set_time_to));
    String Location = getIntent().getStringExtra(FreewayCoffeeItemListView.INTENT_KEY_USER_LOCATION_STRING);
    //getString(R.string.
    TextView LocationNameView = (TextView)findViewById(R.id.fc_time_to_location_location_label);
    LocationNameView.setText(getString(R.string.fc_location) + " " + Location);
    // Label must be bound before UpdateCurrentTimeToLocationDisplay() reads it.
    TimeToLocationText= (TextView)findViewById(R.id.fc_time_to_location_update_label);
    UpdateCurrentTimeToLocationDisplay();
    SetSliderMinMaxLabels();
    TimeSeeker = (SeekBar)findViewById(R.id.fc_time_to_location_seeker);
    TimeSeeker.setMax(appState.GetPreferenceMaxTimeToLocation()+1);
    UpdateTimeToLocationSlider();
    // Keep the "Time: x mins" label in sync as the user drags the slider.
    TimeSeeker.setOnSeekBarChangeListener(new OnSeekBarChangeListener()
    {
        @Override
        public void onProgressChanged(SeekBar seekBar,
                int progress, boolean fromUser)
        {
            CurrentTimeToLocation=progress;
            UpdateCurrentTimeToLocationDisplay();
        }
        @Override
        public void onStartTrackingTouch(SeekBar seekBar) {
            // TODO Auto-generated method stub
        }
        @Override
        public void onStopTrackingTouch(SeekBar seekBar) {
            // TODO Auto-generated method stub
        }
    });
}
/** Detach the in-flight async request (if any) so it cannot leak this Activity. */
@Override // was missing: onDestroy overrides Activity.onDestroy — let the compiler check it
protected void onDestroy ()
{
    super.onDestroy();
    if(AsyncGet!=null)
    {
        AsyncGet.UnlinkActivity();
    }
}
@Override
public Object onRetainNonConfigurationInstance()
{
    // Called before a configuration change destroys this Activity. Hand the
    // in-flight AsyncTask to the replacement instance: detach it first so it
    // drops its reference to the dying Activity, and stash the two time values
    // on it so the new onCreate() can restore them.
    if(AsyncGet!=null)
    {
        AsyncGet.UnlinkActivity();
        AsyncGet.CurrentTimeToLocationSaved=CurrentTimeToLocation;
        AsyncGet.OriginalTimeToLocationSaved=OriginalTimeToLocation;
        return AsyncGet;
    }
    return null;
}
/** Shows the indeterminate "updating arrival time" spinner and keeps it for later dismissal. */
public void showProgressDialog()
{
    final String message = getString(R.string.fc_updating_location_arrival);
    TimeToLocProgress = ProgressDialog.show(this, "", message, true);
}
/** Pushes the current time-to-location value into the SeekBar position. */
public void UpdateTimeToLocationSlider()
{
    final int progress = CurrentTimeToLocation;
    TimeSeeker.setProgress(progress);
}
/** "Cancel" button handler: report a canceled result to the caller and close. */
public void DoCancel(View V)
{
    this.setResult(RESULT_CANCELED);
    this.finish();
}
/** "Reset" button handler: restore the value we arrived with and refresh slider and label. */
public void DoReset(View V)
{
    CurrentTimeToLocation = OriginalTimeToLocation;
    UpdateTimeToLocationSlider();
    UpdateCurrentTimeToLocationDisplay();
}
/** "Update" button handler: launch the async request that pushes the new time to the server. */
public void DoUpdate(View V)
{
    try
    {
        AsyncGet = new FreewayCoffeeTimeToLocationAsyncGet(this, appState);
        final String requestUrl = appState.MakeUpdateTimeToLocationURL(CurrentTimeToLocation);
        AsyncGet.execute(requestUrl);
    }
    catch (UnsupportedEncodingException e)
    {
        // TODO
        DisplayNetworkError();
        return;
    }
}
/**
 * Refreshes the "Time: x mins" label from CurrentTimeToLocation.
 *
 * <p>Bug fix: the previous null-guard called setText() on the very reference it
 * had just found to be null, which guaranteed a NullPointerException. Now we
 * log and bail out when the label view is not bound.
 */
public void UpdateCurrentTimeToLocationDisplay()
{
    if(TimeToLocationText==null)
    {
        // View not bound (layout lookup failed or called too early) — nothing to update.
        // TODO Track this
        Log.w("FreewayCoffee", "UpdateCurrentTimeToLocationDisplay: time label view is missing");
        return;
    }
    // = "Time: x mins"
    TimeToLocationText.setText(getString(R.string.fc_time) + " " +
            String.valueOf(CurrentTimeToLocation) + " " +
            getString(R.string.fc_minutes));
}
@Override
public void onConfigurationChanged(Configuration newConfig)
{
    super.onConfigurationChanged(newConfig);
    // Re-pin the screen to portrait whenever the configuration changes.
    // NOTE(review): presumably intended to keep this screen portrait-only;
    // confirm — declaring the orientation in the manifest would be the usual way.
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
}
/**
 * Parses the server's XML reply to the time-to-location update and reacts:
 * network error -> error toast; not-logged-in -> propagate the result code and
 * close; success -> toast, refresh cached user info and finish with RESULT_OK.
 *
 * <p>Bug fix: the not-logged-in branch called finish() but then fell through to
 * the success/failure handling below, which could show a bogus toast and call
 * setResult() a second time via EndActivity(); it now returns immediately.
 */
public void ProcessXMLResult(String XML)
{
    /* Get a SAXParser from the SAXPArserFactory. */
    SAXParserFactory spf = SAXParserFactory.newInstance();
    // The request has completed; drop the task reference.
    AsyncGet=null;
    try
    {
        SAXParser sp = spf.newSAXParser();
        /* Get the XMLReader of the SAXParser we created. */
        XMLReader xr = sp.getXMLReader();
        /* Create a new ContentHandler and apply it to the XML-Reader*/
        TimeToLocationXMLHandler = new FreewayCoffeeTimeToLocationXMLHandler(appState);
        xr.setContentHandler(TimeToLocationXMLHandler);
        // Parse the xml-data from our URL.
        InputSource is = new InputSource(new StringReader(XML));
        //is.setEncoding("UTF-8");
        xr.parse(is);
        /* Parsing has finished — tear down the progress spinner before reacting. */
        if(TimeToLocProgress!=null)
        {
            TimeToLocProgress.dismiss();
            TimeToLocProgress=null;
        }
        if(TimeToLocationXMLHandler.NetworkError==true)
        {
            DisplayNetworkError();
            return;
        }
        if(TimeToLocationXMLHandler.signonResponse!=null)
        {
            // TODO -=- in general, what do we do for these sub-views if we get say a SIGNON_OK.
            // Cannot just reload indefinitely. Can I even call reload here ?
            // Return to the parent view and try to let that one refresh ? Lets punt it for now.
            // Tell the parent view that some critical login-type thing has happened!
            setResult(FreewayCoffeeItemListView.RESULT_CODE_NOT_LOGGED_IN);
            this.finish();
            // Fixed: stop here — do not fall through to the success/failure toasts below.
            return;
        }
        if(TimeToLocationXMLHandler.UpdateTimeToLocationFlag==true)
        {
            Toast SuccessToast = Toast.makeText(appState.getApplicationContext(),
                    getString(R.string.fc_time_to_location_updated),
                    Toast.LENGTH_SHORT);
            SuccessToast.show();
            // Keep the cached user info in step with what the server just accepted.
            appState.GetUserInfoData().put(FreewayCoffeeItemListView.USER_TIME_TO_LOCATION,String.valueOf(CurrentTimeToLocation));
            EndActivity();
        }
        else
        {
            // TODO Obviously could do better here.
            Toast ErrorToast = Toast.makeText(appState.getApplicationContext(),"Time to location could not be updated. Please try again",Toast.LENGTH_SHORT);
            ErrorToast.show();
            return;
        }
    }
    catch(SAXException e)
    {
        //Log.w("FCItemListView",e.getMessage());
        DisplayNetworkError();
        return;
    }
    catch (ParserConfigurationException pe)
    {
        DisplayNetworkError();
        return;
    }
    catch (IOException io)
    {
        DisplayNetworkError();
        return;
    }
}
/** Report success to the caller and close this screen. */
private void EndActivity()
{
    this.setResult(RESULT_OK);
    this.finish();
}
/** Dismisses any progress spinner and shows the generic network-error toast. */
private void DisplayNetworkError()
{
    // Tear down the spinner first, if one is showing.
    if (TimeToLocProgress != null)
    {
        TimeToLocProgress.dismiss();
        TimeToLocProgress = null;
    }
    Toast.makeText(appState.getApplicationContext(),
            getString(R.string.fc_network_error),
            Toast.LENGTH_SHORT).show();
}
/** Writes the slider's end labels: 0 on the left, the preference maximum on the right. */
public void SetSliderMinMaxLabels()
{
    final TextView maxLabel = (TextView) findViewById(R.id.fc_time_to_location_seeker_text_max);
    if (maxLabel != null)
    {
        maxLabel.setText(String.valueOf(appState.GetPreferenceMaxTimeToLocation()));
    }
    // else: TODO this doesn't look good — layout is missing the max label
    final TextView minLabel = (TextView) findViewById(R.id.fc_time_to_location_seeker_text_min);
    if (minLabel != null)
    {
        minLabel.setText(String.valueOf(0));
    }
    // else: TODO this doesn't look good — layout is missing the min label
}
}
// WONT USE BUT LIKE IT FOR SOMETHING ELSE ?
/*
seekBar.setOnSeekBarChangeListener(new OnSeekBarChangeListener() {
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
newProgressValue = seekBar.getProgress();
currentProgress = newProgressValue ;
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putInt(Key_PROGRESS, newProgressValue);
editor.commit();
}
*/
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2021 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.navigator.actions;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.resources.IContainer;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.QualifiedName;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.swt.custom.StyledText;
import org.eclipse.swt.dnd.Clipboard;
import org.eclipse.swt.dnd.FileTransfer;
import org.eclipse.swt.widgets.*;
import org.eclipse.ui.handlers.HandlerUtil;
import org.jkiss.code.NotNull;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.navigator.DBNDatabaseNode;
import org.jkiss.dbeaver.model.navigator.DBNNode;
import org.jkiss.dbeaver.model.navigator.DBNResource;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.DBRRunnableWithProgress;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.dnd.TreeNodeTransfer;
import org.jkiss.dbeaver.ui.navigator.NavigatorUtils;
import org.jkiss.dbeaver.utils.RuntimeUtils;
import org.jkiss.utils.CommonUtils;
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import java.util.Map;
public class NavigatorHandlerObjectCreateCopy extends NavigatorHandlerObjectCreateBase {
static final Log log = Log.getLog(NavigatorHandlerObjectCreateCopy.class);
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
    // If a text widget has keyboard focus, "paste" means paste into that widget,
    // not into the navigator — delegate and stop.
    Shell activeShell = HandlerUtil.getActiveShell(event);
    Control focusControl = activeShell.getDisplay().getFocusControl();
    if (focusControl instanceof Text) {
        ((Text) focusControl).paste();
        return null;
    } else if (focusControl instanceof StyledText) {
        ((StyledText) focusControl).paste();
        return null;
    } else if (focusControl instanceof Combo) {
        ((Combo) focusControl).paste();
        return null;
    }
    final ISelection selection = HandlerUtil.getCurrentSelection(event);
    DBNNode curNode = NavigatorUtils.getSelectedNode(selection);
    if (curNode != null) {
        Clipboard clipboard = new Clipboard(Display.getDefault());
        try {
            // Preferred format: navigator nodes copied from within the application.
            @SuppressWarnings("unchecked")
            Collection<DBNNode> cbNodes = (Collection<DBNNode>) clipboard.getContents(TreeNodeTransfer.getInstance());
            if (cbNodes != null) {
                for (DBNNode nodeObject : cbNodes) {
                    if (nodeObject instanceof DBNDatabaseNode) {
                        // Database object: go through the regular "create copy" flow.
                        createNewObject(HandlerUtil.getActiveWorkbenchWindow(event), curNode, ((DBNDatabaseNode) nodeObject));
                    } else if (nodeObject instanceof DBNResource && curNode instanceof DBNResource) {
                        // Workspace resource pasted onto another resource.
                        pasteResource((DBNResource) nodeObject, (DBNResource) curNode);
                    }
                }
            } else if (curNode instanceof DBNResource) {
                // Fallback: OS file paths (e.g. copied from a native file manager).
                String[] files = (String[]) clipboard.getContents(FileTransfer.getInstance());
                if (files != null) {
                    for (String fileName : files) {
                        final File file = new File(fileName);
                        if (file.exists()) {
                            pasteResource(file, (DBNResource) curNode);
                        }
                    }
                } else {
                    log.debug("Paste error: unsupported clipboard format. File or folder were expected.");
                    Display.getCurrent().beep();
                }
            } else {
                log.debug("Paste error: clipboard contains data in unsupported format");
                Display.getCurrent().beep();
            }
        } finally {
            // SWT clipboards hold native resources and must be disposed explicitly.
            clipboard.dispose();
        }
    }
    return null;
}
/**
 * Copies the resource behind {@code resourceNode} into {@code toFolder} (or into the
 * target's parent container when the target itself is not a container), running the
 * copy inside the platform progress service.
 *
 * <p>Fix: {@code InterruptedException} (user cancel) was silently swallowed; the
 * interrupt status is now restored per standard practice.
 */
private void pasteResource(DBNResource resourceNode, DBNResource toFolder) {
    final IResource resource = resourceNode.getResource();
    final IResource targetResource = toFolder.getResource();
    assert resource != null;
    assert targetResource != null;
    final IContainer targetFolder = targetResource instanceof IContainer ? (IContainer) targetResource : targetResource.getParent();
    try {
        UIUtils.runInProgressService(new DBRRunnableWithProgress() {
            @Override
            public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
                try {
                    copyResource(monitor, resource, targetFolder);
                } catch (Exception e) {
                    throw new InvocationTargetException(e);
                }
            }
        });
    } catch (InvocationTargetException e) {
        DBWorkbench.getPlatformUI().showError("Copy error", "Error copying resource", e.getTargetException());
    } catch (InterruptedException e) {
        // User canceled: preserve the thread's interrupt status instead of swallowing it.
        Thread.currentThread().interrupt();
    }
}
/**
 * Copies {@code resource} into {@code targetFolder}. When copying into the resource's
 * own parent, a unique "name-N[.ext]" is generated; otherwise a name clash is an error.
 *
 * <p>Fix: the {@code IFolder} branch was an empty block, so copying a folder silently
 * did nothing; it now logs a warning so the no-op is at least diagnosable.
 *
 * @throws IOException if the target name already exists in a different folder
 */
private void copyResource(@NotNull DBRProgressMonitor monitor, @NotNull IResource resource, @NotNull IContainer targetFolder) throws CoreException, IOException {
    final IProgressMonitor nestedMonitor = RuntimeUtils.getNestedMonitor(monitor);
    final String extension = resource.getFileExtension();
    String targetName = resource.getName();
    if (resource.getParent().equals(targetFolder)) {
        // Copy within the same folder: strip the extension, then probe "name-1", "name-2", ...
        String plainName = extension != null && !extension.isEmpty() && targetName.endsWith(extension) ?
            targetName.substring(0, targetName.length() - extension.length() - 1) : targetName;
        for (int i = 1; ; i++) {
            String testName = plainName + "-" + i;
            if (!CommonUtils.isEmpty(extension)) {
                testName += "." + extension;
            }
            if (targetFolder.findMember(testName) == null) {
                targetName = testName;
                break;
            }
        }
    } else if (targetFolder.findMember(targetName) != null) {
        throw new IOException("Target resource '" + targetName + "' already exists");
    }
    if (resource instanceof IFile) {
        // Copy single file
        final IFile targetFile = targetFolder.getFile(new Path(targetName));
        if (!targetFile.exists()) {
            // Create an empty file first so persistent properties can be attached before the content copy.
            targetFile.create(new ByteArrayInputStream(new byte[0]), true, nestedMonitor);
        }
        final Map<QualifiedName, String> props = resource.getPersistentProperties();
        if (props != null && !props.isEmpty()) {
            for (Map.Entry<QualifiedName, String> prop : props.entrySet()) {
                targetFile.setPersistentProperty(prop.getKey(), prop.getValue());
            }
        }
        try (InputStream is = ((IFile) resource).getContents()) {
            targetFile.setContents(is, true, true, nestedMonitor);
        }
    } else if (resource instanceof IFolder) {
        // Copy folder with all files and subfolders — not implemented yet.
        // Warn instead of silently doing nothing so users/logs can tell why no copy appeared.
        log.warn("Copy of folder '" + resource.getName() + "' is not supported yet");
    }
}
/**
 * Pastes a file from the local file system into the folder represented by the
 * given navigator node. Fails if a resource with the same name already exists.
 */
private void pasteResource(final File file, DBNResource toFolder) {
    final IResource target = toFolder.getResource();
    assert target != null;
    final IContainer destination = target instanceof IContainer ? (IContainer) target : target.getParent();
    try {
        UIUtils.runInProgressService(new DBRRunnableWithProgress() {
            @Override
            public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
                try {
                    final IFile newFile = destination.getFile(new Path(file.getName()));
                    if (newFile.exists()) {
                        throw new IOException("Target file '" + newFile.getFullPath() + "' already exists");
                    }
                    try (InputStream input = new FileInputStream(file)) {
                        newFile.create(input, true, monitor.getNestedMonitor());
                    }
                } catch (Exception e) {
                    throw new InvocationTargetException(e);
                }
            }
        });
    } catch (InvocationTargetException e) {
        DBWorkbench.getPlatformUI().showError("Copy error", "Error copying resource", e.getTargetException());
    } catch (InterruptedException e) {
        // Cancelled by the user - nothing to report
    }
}
}
| |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// http://code.google.com/p/protobuf/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package net.jarlehansen.protobuf.javame;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
/**
* Immutable array of bytes.
*
* @author crazybob@google.com Bob Lee
* @author kenton@google.com Kenton Varda
*/
/**
 * Immutable array of bytes.
 *
 * <p>Instances are created through the static {@code copyFrom}/{@code copyFromUtf8}
 * factories, which always take a defensive copy, so a {@code ByteString} can be
 * shared freely without risk of modification.
 *
 * @author crazybob@google.com Bob Lee
 * @author kenton@google.com Kenton Varda
 */
public final class ByteString {
    // Backing storage; never exposed directly, only via copies.
    private final byte[] bytes;

    private ByteString(byte[] bytes) {
        this.bytes = bytes;
    }

    /**
     * Gets the byte at the given index.
     *
     * @throws ArrayIndexOutOfBoundsException if {@code index} is &lt; 0 or &gt;= size
     */
    public byte byteAt(int index) {
        return bytes[index];
    }

    /**
     * Gets the number of bytes.
     */
    public int size() {
        return this.bytes.length;
    }

    /**
     * Returns {@code true} if the size is {@code 0}, {@code false} otherwise.
     */
    public boolean isEmpty() {
        return this.bytes.length == 0;
    }

    // =================================================================
    // byte[] -> ByteString

    /**
     * Empty ByteString.
     */
    public static final ByteString EMPTY = new ByteString(new byte[0]);

    /**
     * Copies the given bytes into a {@code ByteString}.
     *
     * @param bytes  source array (not retained; a copy is taken)
     * @param offset start offset within {@code bytes}
     * @param size   number of bytes to copy
     */
    public static ByteString copyFrom(byte[] bytes, int offset, int size) {
        byte[] copy = new byte[size];
        System.arraycopy(bytes, offset, copy, 0, size);
        return new ByteString(copy);
    }

    /**
     * Copies the given bytes into a {@code ByteString}.
     */
    public static ByteString copyFrom(byte[] bytes) {
        return copyFrom(bytes, 0, bytes.length);
    }

    /**
     * Encodes {@code text} into a sequence of bytes using the named charset
     * and returns the result as a {@code ByteString}.
     */
    public static ByteString copyFrom(String text, String charsetName)
            throws UnsupportedEncodingException {
        return new ByteString(text.getBytes(charsetName));
    }

    /**
     * Encodes {@code text} into a sequence of UTF-8 bytes and returns the
     * result as a {@code ByteString}.
     */
    public static ByteString copyFromUtf8(String text) {
        try {
            return new ByteString(text.getBytes("UTF-8"));
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is a required charset on every Java platform, so this should be
            // unreachable; preserve the cause so an impossible failure stays diagnosable.
            throw new RuntimeException("UTF-8 not supported?", e);
        }
    }

    // =================================================================
    // ByteString -> byte[]

    /**
     * Copies bytes into a buffer at the given offset.
     *
     * @param target buffer to copy into
     * @param offset in the target buffer
     */
    public void copyTo(byte[] target, int offset) {
        System.arraycopy(bytes, 0, target, offset, bytes.length);
    }

    /**
     * Copies bytes into a buffer.
     *
     * @param target       buffer to copy into
     * @param sourceOffset offset within these bytes
     * @param targetOffset offset within the target buffer
     * @param size         number of bytes to copy
     */
    public void copyTo(byte[] target, int sourceOffset, int targetOffset,
            int size) {
        System.arraycopy(bytes, sourceOffset, target, targetOffset, size);
    }

    /**
     * Copies bytes to a {@code byte[]}.
     */
    public byte[] toByteArray() {
        int size = this.bytes.length;
        byte[] copy = new byte[size];
        System.arraycopy(this.bytes, 0, copy, 0, size);
        return copy;
    }

    /**
     * Constructs a new {@code String} by decoding the bytes using the
     * specified charset.
     */
    public String toString(String charsetName)
            throws UnsupportedEncodingException {
        return new String(this.bytes, charsetName);
    }

    /**
     * Constructs a new {@code String} by decoding the bytes as UTF-8.
     */
    public String toStringUtf8() {
        try {
            return new String(this.bytes, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // Unreachable on conforming platforms; keep the cause (see copyFromUtf8).
            throw new RuntimeException("UTF-8 not supported?", e);
        }
    }

    // =================================================================
    // equals() and hashCode()

    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof ByteString)) {
            return false;
        }
        ByteString other = (ByteString) o;
        int size = this.bytes.length;
        if (size != other.bytes.length) {
            return false;
        }
        byte[] bytes = this.bytes;
        byte[] otherBytes = other.bytes;
        for (int i = 0; i < size; i++) {
            if (bytes[i] != otherBytes[i]) {
                return false;
            }
        }
        return true;
    }

    // Lazily computed hash; 0 means "not yet computed". The benign race on this
    // field is the same idiom java.lang.String uses (recomputation is idempotent).
    volatile int hash = 0;

    public int hashCode() {
        int h = this.hash;
        if (h == 0) {
            byte[] bytes = this.bytes;
            int size = this.bytes.length;
            h = size;
            for (int i = 0; i < size; i++) {
                h = h * 31 + bytes[i];
            }
            if (h == 0) {
                // Never cache 0, otherwise we would recompute on every call.
                h = 1;
            }
            this.hash = h;
        }
        return h;
    }

    // =================================================================
    // Input stream

    /**
     * Creates an {@code InputStream} which can be used to read the bytes.
     */
    public InputStream newInput() {
        return new ByteArrayInputStream(bytes);
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.workflow.engine.util;
import org.apache.airavata.common.utils.StringUtil;
import org.apache.airavata.workflow.engine.interpretor.WorkFlowInterpreterException;
import org.apache.airavata.workflow.engine.invoker.Invoker;
import org.apache.airavata.workflow.model.exceptions.WorkflowException;
import org.apache.airavata.workflow.model.exceptions.WorkflowRuntimeException;
import org.apache.airavata.workflow.model.graph.DataPort;
import org.apache.airavata.workflow.model.graph.Node;
import org.apache.airavata.workflow.model.graph.system.EndForEachNode;
import org.apache.airavata.workflow.model.graph.system.ForEachNode;
import org.apache.airavata.workflow.model.graph.system.InputNode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
//import xsul5.XmlConstants;
import java.net.HttpURLConnection;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
//import org.apache.airavata.client.stub.interpretor.NameValue;
/**
 * Static utility methods used by the workflow interpreter (URL reachability
 * checks and for-each graph-node helpers).
 */
public class XBayaUtil {
    private static final Logger logger = LoggerFactory.getLogger(XBayaUtil.class);
    public static final String JCR_USER = "jcr.username";
    public static final String JCR_PASS = "jcr.password";
    public static final String JCR_URL = "jcr.url";

    /**
     * Checks whether the given URL answers an HTTP GET with status 200 within
     * a 9-second connect/read timeout.
     *
     * @param URLName the URL to probe; "http://" is prepended if no http(s) scheme is present
     * @return {@code true} only if the server responded with HTTP 200
     */
    public static boolean isURLExists(String URLName) {
        try {
            // Prepend a scheme only when the value does not already start with
            // "http" (case-insensitive). The previous check used
            // toUpperCase().contains("HTTP"), which is locale-sensitive and matched
            // "http" anywhere in the string (e.g. "myhttpserver.com"), so such
            // hosts never got a scheme and always failed with MalformedURLException.
            if (!URLName.regionMatches(true, 0, "http", 0, 4)) {
                URLName = "http://" + URLName;
            }
            URL url = new URL(URLName);
            // NOTE(review): global JVM-wide side effect on every call - presumably
            // intentional to honor system proxies; confirm before hoisting.
            System.setProperty("java.net.useSystemProxies", "true");
            HttpURLConnection urlConn = (HttpURLConnection) url.openConnection();
            try {
                urlConn.setConnectTimeout(9000);
                urlConn.setReadTimeout(9000);
                urlConn.connect();
                return HttpURLConnection.HTTP_OK == urlConn.getResponseCode();
            } finally {
                // Release the connection instead of leaking it
                urlConn.disconnect();
            }
        } catch (SocketTimeoutException e) {
            return false;
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
            return false;
        }
    }

    /**
     * Collects the input values feeding a for-each node into {@code listOfValues}.
     *
     * @param forEachNode  the for-each node whose inputs are gathered
     * @param listOfValues out-parameter receiving the split element values
     * @param invokerMap   invokers keyed by graph node, used to look up port values
     * @return the last raw input value found (kept for provenance), or {@code null}
     * @throws WorkflowException if an input cannot be resolved
     */
    public static Object getInputsForForEachNode(final ForEachNode forEachNode,
            final LinkedList<String> listOfValues, Map<Node, Invoker> invokerMap) throws WorkflowException {
        Node forEachInputNode = forEachNode.getInputPort(0).getFromNode();
        // if input node for for-each is WSNode
        Object returnValForProvenance = null;
        if (forEachInputNode instanceof InputNode) {
            for (DataPort dataPort : forEachNode.getInputPorts()) {
                returnValForProvenance = XBayaUtil
                        .findInputFromPort(dataPort, invokerMap);
                if (null == returnValForProvenance) {
                    throw new WorkFlowInterpreterException(
                            "Unable to find input for the node:"
                                    + forEachNode.getID());
                }
                String[] vals = StringUtil.getElementsFromString(returnValForProvenance.toString());
                listOfValues.addAll(Arrays.asList(vals));
            }
        }
        return returnValForProvenance;
    }

    /**
     * Resolves the value arriving at the given input port.
     * <p>
     * NOTE(review): the original resolution logic below is commented out, so this
     * currently always returns {@code null} - callers treat that as "not found".
     *
     * @param inputPort  the port whose upstream value is requested
     * @param invokerMap invokers keyed by graph node
     * @return the resolved value, currently always {@code null}
     * @throws WorkflowException declared for API compatibility
     */
    public static Object findInputFromPort(DataPort inputPort, Map<Node, Invoker> invokerMap) throws WorkflowException {
        Object outputVal = null;
        // Node fromNode = inputPort.getFromNode();
        // if (fromNode instanceof InputNode) {
        // outputVal = ((InputNode) fromNode).getDefaultValue();
        // } else if (fromNode instanceof ConstantNode) {
        // outputVal = ((ConstantNode) fromNode).getValue();
        // } else if (fromNode instanceof EndifNode) {
        // Invoker fromInvoker = invokerMap.get(fromNode);
        // outputVal = fromInvoker.getOutput(inputPort.getFromPort().getID());
        // } else if (fromNode instanceof InstanceNode) {
        // return ((InstanceNode) fromNode).getOutputInstanceId();
        // } else if (fromNode instanceof EndForEachNode) {
        // outputVal = "";
        // Invoker workflowInvoker = invokerMap.get(fromNode);
        // String outputName = fromNode.getOutputPort(0).getName();
        // XmlElement msgElmt = XmlConstants.BUILDER
        // .parseFragmentFromString("<temp>"
        // + workflowInvoker.getOutput(outputName) + "</temp>");
        // Iterator valItr = msgElmt.children().iterator();
        // while (valItr.hasNext()) {
        // Object object2 = valItr.next();
        // if (object2 instanceof XmlElement) {
        // outputVal = outputVal
        // + StringUtil.DELIMETER
        // + StringUtil.quoteString(((XmlElement) object2).children().iterator()
        // .next().toString());
        // }
        // }
        // outputVal = ((String) outputVal).substring(1,
        // ((String) outputVal).length());
        // } else {
        // Invoker fromInvoker = invokerMap.get(fromNode);
        // try {
        // if (fromInvoker != null)
        // outputVal = fromInvoker.getOutput(inputPort.getFromPort()
        // .getName());
        //
        // } catch (Exception e) {
        // // if the value is still null look it up from the inputport name
        // // because the value is set to the input port name at some point
        // // there is no harm in doing this
        // if (null == outputVal) {
        // outputVal = fromInvoker.getOutput(inputPort.getName());
        // }
        // }
        //
        // }
        return outputVal;
    }

    /**
     * Finds the matching end-for-each node reachable through the single
     * out-edge of the given for-each node.
     *
     * @param node the for-each node
     * @return the corresponding {@link EndForEachNode}
     * @throws WorkflowRuntimeException if the graph shape is not the expected
     *         forEach -> middle -> endForEach chain
     */
    public static Node findEndForEachFor(ForEachNode node) {
        Collection<Node> toNodes = node.getOutputPort(0).getToNodes();
        if (toNodes.size() != 1) {
            throw new WorkflowRuntimeException("ForEach output does not contain single out-edge");
        }
        Node middleNode = toNodes.iterator().next();
        List<DataPort> outputPorts = middleNode.getOutputPorts();
        for (DataPort dataPort : outputPorts) {
            if (dataPort.getToNodes().size() == 1) {
                Node possibleEndForEachNode = dataPort.getToNodes().get(0);
                if (possibleEndForEachNode instanceof EndForEachNode) {
                    return possibleEndForEachNode;
                }
            }
        }
        throw new WorkflowRuntimeException("EndForEachNode not found");
    }

    /* public static List<NameValue> getIOParameterData(String xml) throws ParserConfigurationException, SAXException, IOException{
        List<NameValue> parameters=new ArrayList<NameValue>();
        Document parameterDocument = XMLUtils.newDocument(new ByteArrayInputStream(xml.getBytes()));
        org.w3c.dom.NodeList childNodes = parameterDocument.getDocumentElement().getChildNodes();
        for(int i=0;i<childNodes.getLength();i++){
            org.w3c.dom.Node parameterNode = childNodes.item(i);
            NameValue pair = new NameValue();
            pair.setName(parameterNode.getLocalName());
            pair.setValue(parameterNode.getTextContent());
            parameters.add(pair);
        }
        return parameters;
    }*/

    // public static AiravataRegistry2 getExperimentCatalog(URL url) throws IOException, RepositoryException, URISyntaxException {
    // Properties properties = new Properties();
    // properties.load(url.openStream());
    // JCRComponentRegistry jcrComponentRegistry = new JCRComponentRegistry(new URI((String) properties.get(JCR_URL)),
    // (String) properties.get(JCR_USER),(String) properties.get(JCR_PASS));
    // return jcrComponentRegistry.getExperimentCatalog();
    // }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.support;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.camel.CamelContext;
import org.apache.camel.DelegateEndpoint;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.ExtendedCamelContext;
import org.apache.camel.NoSuchBeanException;
import org.apache.camel.PollingConsumer;
import org.apache.camel.Processor;
import org.apache.camel.ResolveEndpointFailedException;
import org.apache.camel.Route;
import org.apache.camel.spi.PropertiesComponent;
import org.apache.camel.support.service.ServiceHelper;
import org.apache.camel.util.StringHelper;
import org.apache.camel.util.URISupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.camel.util.StringHelper.after;
/**
* Some helper methods for working with {@link Endpoint} instances
*/
/**
 * Some helper methods for working with {@link Endpoint} instances
 */
public final class EndpointHelper {

    private static final Logger LOG = LoggerFactory.getLogger(EndpointHelper.class);
    // Monotonic counter backing createEndpointId(); shared across all endpoints in the JVM
    private static final AtomicLong ENDPOINT_COUNTER = new AtomicLong();

    private EndpointHelper() {
        //Utility Class
    }

    /**
     * Resolves the endpoint uri that may have property placeholders (supports optional property placeholders).
     *
     * @param camelContext the camel context
     * @param uri the endpoint uri
     * @return returns endpoint uri with property placeholders resolved
     */
    public static String resolveEndpointUriPropertyPlaceholders(CamelContext camelContext, String uri) {
        // the uri may have optional property placeholders which is not possible to resolve
        // so we keep the unresolved in the uri, which we then afterwards will remove
        // which is a little complex depending on the placeholder is from context-path or query parameters
        // in the uri string
        try {
            uri = camelContext.adapt(ExtendedCamelContext.class).resolvePropertyPlaceholders(uri, true);
            if (uri == null || uri.isEmpty()) {
                return uri;
            }
            String prefix = PropertiesComponent.PREFIX_OPTIONAL_TOKEN;
            if (uri.contains(prefix)) {
                String unresolved = uri;
                uri = doResolveEndpointUriOptionalPropertyPlaceholders(unresolved);
                LOG.trace("Unresolved optional placeholders removed from uri: {} -> {}", unresolved, uri);
            }
            LOG.trace("Resolved property placeholders with uri: {}", uri);
        } catch (Exception e) {
            throw new ResolveEndpointFailedException(uri, e);
        }
        return uri;
    }

    private static String doResolveEndpointUriOptionalPropertyPlaceholders(String uri) throws URISyntaxException {
        String prefix = PropertiesComponent.PREFIX_OPTIONAL_TOKEN;
        // find query position which is the first question mark that is not part of the optional token prefix
        int pos = 0;
        for (int i = 0; i < uri.length(); i++) {
            char ch = uri.charAt(i);
            if (ch == '?') {
                // ensure that its not part of property prefix
                if (i > 2) {
                    char ch1 = uri.charAt(i - 1);
                    char ch2 = uri.charAt(i - 2);
                    if (ch1 != '{' && ch2 != '{') {
                        pos = i;
                        break;
                    }
                } else {
                    pos = i;
                    break;
                }
            }
        }
        String base = pos > 0 ? uri.substring(0, pos) : uri;
        String query = pos > 0 ? uri.substring(pos + 1) : null;

        // the base (context path) should remove all unresolved property placeholders
        // which is done by replacing all begin...end tokens with an empty string
        String pattern = "\\{\\{?.*}}";
        base = base.replaceAll(pattern, "");

        // the query parameters needs to be rebuild by removing the unresolved key=value pairs
        if (query != null && query.contains(prefix)) {
            Map<String, Object> params = URISupport.parseQuery(query);
            Map<String, Object> keep = new LinkedHashMap<>();
            for (Map.Entry<String, Object> entry : params.entrySet()) {
                String key = entry.getKey();
                if (key.startsWith(prefix)) {
                    continue;
                }
                Object value = entry.getValue();
                if (value instanceof String && ((String) value).startsWith(prefix)) {
                    continue;
                }
                keep.put(key, value);
            }
            // rebuild query
            query = URISupport.createQueryString(keep);
        }

        // assemble uri as answer
        uri = query != null && !query.isEmpty() ? base + "?" + query : base;
        return uri;
    }

    /**
     * Normalize uri so we can do endpoint hits with minor mistakes and parameters is not in the same order.
     *
     * @param uri the uri
     * @return normalized uri
     * @throws ResolveEndpointFailedException if uri cannot be normalized
     */
    public static String normalizeEndpointUri(String uri) {
        try {
            uri = URISupport.normalizeUri(uri);
        } catch (Exception e) {
            throw new ResolveEndpointFailedException(uri, e);
        }
        return uri;
    }

    /**
     * Creates a {@link PollingConsumer} and polls all pending messages on the endpoint and invokes the given
     * {@link Processor} to process each {@link Exchange} and then closes down the consumer and throws any exceptions
     * thrown.
     */
    public static void pollEndpoint(Endpoint endpoint, Processor processor, long timeout) throws Exception {
        PollingConsumer consumer = endpoint.createPollingConsumer();
        try {
            ServiceHelper.startService(consumer);

            while (true) {
                Exchange exchange = consumer.receive(timeout);
                if (exchange == null) {
                    break;
                } else {
                    processor.process(exchange);
                }
            }
        } finally {
            try {
                ServiceHelper.stopAndShutdownService(consumer);
            } catch (Exception e) {
                // FIX: log message previously read "This example is ignored."
                LOG.warn("Failed to stop PollingConsumer: {}. This exception is ignored.", consumer, e);
            }
        }
    }

    /**
     * Creates a {@link PollingConsumer} and polls all pending messages on the endpoint and invokes the given
     * {@link Processor} to process each {@link Exchange} and then closes down the consumer and throws any exceptions
     * thrown.
     */
    public static void pollEndpoint(Endpoint endpoint, Processor processor) throws Exception {
        pollEndpoint(endpoint, processor, 1000L);
    }

    /**
     * Matches the endpoint with the given pattern.
     * <p/>
     * The endpoint will first resolve property placeholders using
     * {@link #resolveEndpointUriPropertyPlaceholders(CamelContext, String)}
     * <p/>
     * The match rules are applied in this order:
     * <ul>
     * <li>exact match, returns true</li>
     * <li>wildcard match (pattern ends with a * and the uri starts with the pattern), returns true</li>
     * <li>regular expression match, returns true</li>
     * <li>exact match with uri normalization of the pattern if possible, returns true</li>
     * <li>otherwise returns false</li>
     * </ul>
     *
     * @param context the Camel context, if <tt>null</tt> then property placeholder resolution is skipped.
     * @param uri the endpoint uri
     * @param pattern a pattern to match
     * @return <tt>true</tt> if match, <tt>false</tt> otherwise.
     */
    public static boolean matchEndpoint(CamelContext context, String uri, String pattern) {
        if (context != null) {
            try {
                uri = resolveEndpointUriPropertyPlaceholders(context, uri);
            } catch (Exception e) {
                throw new ResolveEndpointFailedException(uri, e);
            }
        }

        // normalize uri so we can do endpoint hits with minor mistakes and parameters is not in the same order
        uri = normalizeEndpointUri(uri);

        // we need to test with and without scheme separators (//)
        boolean match = PatternHelper.matchPattern(toggleUriSchemeSeparators(uri), pattern);
        match |= PatternHelper.matchPattern(uri, pattern);
        if (!match && pattern != null && pattern.contains("?")) {
            // try normalizing the pattern as a uri for exact matching, so parameters are ordered the same as in the endpoint uri
            try {
                pattern = URISupport.normalizeUri(pattern);
                // try both with and without scheme separators (//)
                match = toggleUriSchemeSeparators(uri).equalsIgnoreCase(pattern);
                return match || uri.equalsIgnoreCase(pattern);
            } catch (URISyntaxException e) {
                //Can't normalize and original match failed
                return false;
            } catch (Exception e) {
                throw new ResolveEndpointFailedException(uri, e);
            }
        }
        return match;
    }

    /**
     * Toggles // separators in the given uri. If the uri does not contain ://, the slashes are added, otherwise they
     * are removed.
     *
     * @param normalizedUri The uri to add/remove separators in
     * @return The uri with separators added or removed
     */
    private static String toggleUriSchemeSeparators(String normalizedUri) {
        if (normalizedUri.contains("://")) {
            String scheme = StringHelper.before(normalizedUri, "://");
            String path = after(normalizedUri, "://");
            return scheme + ":" + path;
        } else {
            String scheme = StringHelper.before(normalizedUri, ":");
            String path = after(normalizedUri, ":");
            return scheme + "://" + path;
        }
    }

    /**
     * Sets the regular properties on the given bean
     *
     * @param context the camel context
     * @param bean the bean
     * @param parameters parameters
     * @throws Exception is thrown if setting property fails
     * @deprecated use PropertyBindingSupport
     */
    @Deprecated
    public static void setProperties(CamelContext context, Object bean, Map<String, Object> parameters) throws Exception {
        // use the property binding which can do more advanced configuration
        PropertyBindingSupport.build().bind(context, bean, parameters);
    }

    /**
     * Sets the reference properties on the given bean
     * <p/>
     * This is convention over configuration, setting all reference parameters (using
     * {@link #isReferenceParameter(String)} by looking it up in registry and setting it on the bean if possible.
     *
     * @param context the camel context
     * @param bean the bean
     * @param parameters parameters
     * @throws Exception is thrown if setting property fails
     * @deprecated use PropertyBindingSupport
     */
    @Deprecated
    public static void setReferenceProperties(CamelContext context, Object bean, Map<String, Object> parameters)
            throws Exception {
        Iterator<Map.Entry<String, Object>> it = parameters.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, Object> entry = it.next();
            String name = entry.getKey();
            Object v = entry.getValue();
            String value = v != null ? v.toString() : null;
            if (isReferenceParameter(value)) {
                boolean hit = context.adapt(ExtendedCamelContext.class).getBeanIntrospection().setProperty(context,
                        context.getTypeConverter(), bean, name, null, value, true, false, false);
                if (hit) {
                    // must remove as its a valid option and we could configure it
                    it.remove();
                }
            }
        }
    }

    /**
     * Is the given parameter a reference parameter (starting with a # char)
     *
     * @param parameter the parameter
     * @return <tt>true</tt> if its a reference parameter
     */
    public static boolean isReferenceParameter(String parameter) {
        return parameter != null && parameter.trim().startsWith("#") && parameter.trim().length() > 1;
    }

    /**
     * Resolves a reference parameter by making a lookup in the registry.
     *
     * @param <T> type of object to lookup.
     * @param context Camel context to use for lookup.
     * @param value reference parameter value.
     * @param type type of object to lookup.
     * @return lookup result.
     * @throws IllegalArgumentException if referenced object was not found in registry.
     */
    public static <T> T resolveReferenceParameter(CamelContext context, String value, Class<T> type) {
        return resolveReferenceParameter(context, value, type, true);
    }

    /**
     * Resolves a reference parameter by making a lookup in the registry.
     *
     * @param <T> type of object to lookup.
     * @param context Camel context to use for lookup.
     * @param value reference parameter value.
     * @param type type of object to lookup.
     * @return lookup result (or <code>null</code> only if <code>mandatory</code> is
     * <code>false</code>).
     * @throws NoSuchBeanException if object was not found in registry and <code>mandatory</code> is <code>true</code>.
     */
    public static <T> T resolveReferenceParameter(CamelContext context, String value, Class<T> type, boolean mandatory) {
        // it may refer to a type
        if (value.startsWith("#type:")) {
            try {
                Object answer = null;

                // strip the "#type:" prefix to get the class name
                String valueNoHash = value.substring(6);
                Class<?> clazz = context.getClassResolver().resolveMandatoryClass(valueNoHash);
                Set<T> set = context.getRegistry().findByType(type);
                if (set.size() == 1) {
                    answer = set.iterator().next();
                } else if (set.size() > 1) {
                    throw new NoSuchBeanException(
                            value, "Found " + set.size() + " beans of type: " + clazz + ". Only 1 bean instance is supported.");
                }
                if (mandatory && answer == null) {
                    throw new NoSuchBeanException(value);
                }
                return type.cast(answer);
            } catch (ClassNotFoundException e) {
                throw new NoSuchBeanException(value, e);
            }
        } else {
            // plain bean reference: strip "#bean:" and "#" markers before lookup
            String valueNoHash = value.replace("#bean:", "");
            valueNoHash = valueNoHash.replace("#", "");
            if (mandatory) {
                return CamelContextHelper.mandatoryLookupAndConvert(context, valueNoHash, type);
            } else {
                return CamelContextHelper.lookupAndConvert(context, valueNoHash, type);
            }
        }
    }

    /**
     * Resolves a reference list parameter by making lookups in the registry. The parameter value must be one of the
     * following:
     * <ul>
     * <li>a comma-separated list of references to beans of type T</li>
     * <li>a single reference to a bean type T</li>
     * <li>a single reference to a bean of type java.util.List</li>
     * </ul>
     *
     * @param context Camel context to use for lookup.
     * @param value reference parameter value.
     * @param elementType result list element type.
     * @return list of lookup results, will always return a list.
     * @throws IllegalArgumentException if any referenced object was not found in registry.
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public static <T> List<T> resolveReferenceListParameter(CamelContext context, String value, Class<T> elementType) {
        if (value == null) {
            return new ArrayList<>();
        }
        List<String> elements = Arrays.asList(value.split(","));
        if (elements.size() == 1) {
            Object bean = resolveReferenceParameter(context, elements.get(0).trim(), Object.class);
            if (bean instanceof List) {
                // The bean is a list
                return (List) bean;
            } else {
                // The bean is a list element
                List<T> singleElementList = new ArrayList<>();
                singleElementList.add(elementType.cast(bean));
                return singleElementList;
            }
        } else { // more than one list element
            List<T> result = new ArrayList<>(elements.size());
            for (String element : elements) {
                result.add(resolveReferenceParameter(context, element.trim(), elementType));
            }
            return result;
        }
    }

    /**
     * Resolves a parameter, by doing a reference lookup if the parameter is a reference, and converting the parameter
     * to the given type.
     *
     * @param <T> type of object to convert the parameter value as.
     * @param context Camel context to use for lookup.
     * @param value parameter or reference parameter value.
     * @param type type of object to lookup.
     * @return lookup result if it was a reference parameter, or the value converted to the
     * given type
     * @throws IllegalArgumentException if referenced object was not found in registry.
     */
    public static <T> T resolveParameter(CamelContext context, String value, Class<T> type) {
        T result;
        if (EndpointHelper.isReferenceParameter(value)) {
            result = EndpointHelper.resolveReferenceParameter(context, value, type);
        } else {
            result = context.getTypeConverter().convertTo(type, value);
        }
        return result;
    }

    /**
     * Gets the route id for the given endpoint in which there is a consumer listening.
     *
     * @param endpoint the endpoint
     * @return the route id, or <tt>null</tt> if none found
     */
    public static String getRouteIdFromEndpoint(Endpoint endpoint) {
        if (endpoint == null || endpoint.getCamelContext() == null) {
            return null;
        }

        List<Route> routes = endpoint.getCamelContext().getRoutes();
        for (Route route : routes) {
            if (route.getEndpoint().equals(endpoint)
                    || route.getEndpoint().getEndpointKey().equals(endpoint.getEndpointKey())) {
                return route.getId();
            }
        }
        return null;
    }

    /**
     * A helper method for Endpoint implementations to create new Ids for Endpoints which also implement
     * {@link org.apache.camel.spi.HasId}
     */
    public static String createEndpointId() {
        return "endpoint" + ENDPOINT_COUNTER.incrementAndGet();
    }

    /**
     * Lookup the id the given endpoint has been enlisted with in the {@link org.apache.camel.spi.Registry}.
     *
     * @param endpoint the endpoint
     * @return the endpoint id, or <tt>null</tt> if not found
     */
    public static String lookupEndpointRegistryId(Endpoint endpoint) {
        if (endpoint == null || endpoint.getCamelContext() == null) {
            return null;
        }

        // it may be a delegate endpoint, which we need to match as well
        Endpoint delegate = null;
        if (endpoint instanceof DelegateEndpoint) {
            delegate = ((DelegateEndpoint) endpoint).getEndpoint();
        }

        Map<String, Endpoint> map = endpoint.getCamelContext().getRegistry().findByTypeWithName(Endpoint.class);
        for (Map.Entry<String, Endpoint> entry : map.entrySet()) {
            if (entry.getValue().equals(endpoint) || entry.getValue().equals(delegate)) {
                return entry.getKey();
            }
        }

        // not found
        return null;
    }

    /**
     * Attempts to resolve if the url has an <tt>exchangePattern</tt> option configured
     *
     * @param url the url
     * @return the exchange pattern, or <tt>null</tt> if the url has no <tt>exchangePattern</tt> configured.
     */
    public static ExchangePattern resolveExchangePatternFromUrl(String url) {
        // optimize to use simple string contains check
        if (url.contains("exchangePattern=InOnly")) {
            return ExchangePattern.InOnly;
        } else if (url.contains("exchangePattern=InOut")) {
            return ExchangePattern.InOut;
        } else if (url.contains("exchangePattern=InOptionalOut")) {
            return ExchangePattern.InOptionalOut;
        }
        return null;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security.token;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.security.token.DtFetcher;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
/**
 * Unit tests for the {@code dtutil} command line tool ({@code DtUtilShell}):
 * printing, editing, appending, removing and fetching delegation tokens
 * stored in local token files.
 */
public class TestDtUtilShell {
  // Raw bytes spelling "identifier" and "password", used to build test tokens.
  private static byte[] IDENTIFIER = {
      0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72};
  private static byte[] PASSWORD = {
      0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64};
  private static Text KIND = new Text("testTokenKind");
  private static Text SERVICE = new Text("testTokenService");
  // SERVICE reversed, used as a second, distinct service name.
  private static Text SERVICE2 = new Text("ecivreSnekoTtset");
  private static Configuration defaultConf = new Configuration();
  private static FileSystem localFs = null;
  private final String alias = "proxy_ip:1234";
  private final String renewer = "yarn";
  // URLs used by the "get" tests; the scheme of getUrl selects the mocked fetcher.
  private final String getUrl = SERVICE_GET.toString() + "://localhost:9000/";
  private final String getUrl2 = "http://localhost:9000/";
  public static Text SERVICE_GET = new Text("testTokenServiceGet");
  public static Text KIND_GET = new Text("testTokenKindGet");
  // Token handed out by the mocked fetcher; diamond operator avoids the raw-type
  // construction and its unchecked warning.
  public static Token<?> MOCK_TOKEN =
      new Token<>(IDENTIFIER, PASSWORD, KIND_GET, SERVICE_GET);
  static {
    try {
      defaultConf.set("fs.defaultFS", "file:///");
      localFs = FileSystem.getLocal(defaultConf);
    } catch (IOException e) {
      throw new RuntimeException("init failure", e);
    }
  }
  // Captures the shell's stdout so assertions can inspect what was printed.
  private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
  private final Path workDir = new Path(
      GenericTestUtils.getTestDir("TestDtUtilShell").getAbsolutePath());
  private final Path tokenFile = new Path(workDir, "testPrintTokenFile");
  private final Path tokenFile2 = new Path(workDir, "testPrintTokenFile2");
  private final Path tokenLegacyFile = new Path(workDir, "testPrintTokenFile3");
  private final Path tokenFileGet = new Path(workDir, "testGetTokenFile");
  private final String tokenFilename = tokenFile.toString();
  private final String tokenFilename2 = tokenFile2.toString();
  private final String tokenFilenameGet = tokenFileGet.toString();
  private String[] args = null;
  private DtUtilShell dt = null;
  private int rc = 0;
  /**
   * Creates a fresh work directory with three token files (current format with
   * two different services, plus one legacy-format file) and a shell instance
   * whose output is captured in {@link #outContent}.
   */
  @Before
  public void setup() throws Exception {
    localFs.mkdirs(localFs.makeQualified(workDir));
    makeTokenFile(tokenFile, false, null);
    makeTokenFile(tokenFile2, false, SERVICE2);
    makeTokenFile(tokenLegacyFile, true, null);
    dt = new DtUtilShell();
    dt.setConf(new Configuration());
    dt.setOut(new PrintStream(outContent));
    outContent.reset();
    rc = 0;
  }
  @After
  public void teardown() throws Exception {
    localFs.delete(localFs.makeQualified(workDir), true);
  }
  /**
   * Writes a single-token credentials file.
   *
   * @param tokenPath where to write the file
   * @param legacy true to use the legacy token storage format
   * @param service service name for the token; defaults to {@link #SERVICE} when null
   */
  public void makeTokenFile(Path tokenPath, boolean legacy, Text service)
      throws IOException {
    if (service == null) {
      service = SERVICE;
    }
    Credentials creds = new Credentials();
    // Diamond operator replaces the raw-type construction and unchecked cast.
    Token<? extends TokenIdentifier> tok =
        new Token<>(IDENTIFIER, PASSWORD, KIND, service);
    creds.addToken(tok.getService(), tok);
    if (legacy) {
      creds.writeLegacyTokenStorageLocalFile(new File(tokenPath.toString()));
    } else {
      creds.writeTokenStorageFile(tokenPath, defaultConf);
    }
  }
  /** Tests "print" on current and legacy files, with and without -alias filtering. */
  @Test
  public void testPrint() throws Exception {
    args = new String[] {"print", tokenFilename};
    rc = dt.run(args);
    assertEquals("test simple print exit code", 0, rc);
    assertTrue("test simple print output kind:\n" + outContent.toString(),
        outContent.toString().contains(KIND.toString()));
    assertTrue("test simple print output service:\n" + outContent.toString(),
        outContent.toString().contains(SERVICE.toString()));
    outContent.reset();
    args = new String[] {"print", tokenLegacyFile.toString()};
    rc = dt.run(args);
    assertEquals("test legacy print exit code", 0, rc);
    assertTrue("test simple print output kind:\n" + outContent.toString(),
        outContent.toString().contains(KIND.toString()));
    assertTrue("test simple print output service:\n" + outContent.toString(),
        outContent.toString().contains(SERVICE.toString()));
    outContent.reset();
    args = new String[] {
        "print", "-alias", SERVICE.toString(), tokenFilename};
    rc = dt.run(args);
    assertEquals("test alias print exit code", 0, rc);
    assertTrue("test simple print output kind:\n" + outContent.toString(),
        outContent.toString().contains(KIND.toString()));
    assertTrue("test simple print output service:\n" + outContent.toString(),
        outContent.toString().contains(SERVICE.toString()));
    outContent.reset();
    // A non-matching alias must print nothing about the token.
    args = new String[] {
        "print", "-alias", "not-a-serivce", tokenFilename};
    rc = dt.run(args);
    assertEquals("test no alias print exit code", 0, rc);
    assertFalse("test no alias print output kind:\n" + outContent.toString(),
        outContent.toString().contains(KIND.toString()));
    assertFalse("test no alias print output service:\n" + outContent.toString(),
        outContent.toString().contains(SERVICE.toString()));
  }
  /** Tests "edit" renaming a token's service alias; old and new alias must both resolve. */
  @Test
  public void testEdit() throws Exception {
    String oldService = SERVICE2.toString();
    String newAlias = "newName:12345";
    args = new String[] {"edit",
        "-service", oldService, "-alias", newAlias, tokenFilename2};
    rc = dt.run(args);
    assertEquals("test simple edit exit code", 0, rc);
    args = new String[] {"print", "-alias", oldService, tokenFilename2};
    rc = dt.run(args);
    assertEquals("test simple edit print old exit code", 0, rc);
    assertTrue("test simple edit output kind old:\n" + outContent.toString(),
        outContent.toString().contains(KIND.toString()));
    assertTrue("test simple edit output service old:\n" + outContent.toString(),
        outContent.toString().contains(oldService));
    args = new String[] {"print", "-alias", newAlias, tokenFilename2};
    rc = dt.run(args);
    assertEquals("test simple edit print new exit code", 0, rc);
    assertTrue("test simple edit output kind new:\n" + outContent.toString(),
        outContent.toString().contains(KIND.toString()));
    assertTrue("test simple edit output service new:\n" + outContent.toString(),
        outContent.toString().contains(newAlias));
  }
  /** Tests "append"; the target file must end up containing tokens for both services. */
  @Test
  public void testAppend() throws Exception {
    args = new String[] {"append", tokenFilename, tokenFilename2};
    rc = dt.run(args);
    assertEquals("test simple append exit code", 0, rc);
    args = new String[] {"print", tokenFilename2};
    rc = dt.run(args);
    assertEquals("test simple append print exit code", 0, rc);
    assertTrue("test simple append output kind:\n" + outContent.toString(),
        outContent.toString().contains(KIND.toString()));
    assertTrue("test simple append output service:\n" + outContent.toString(),
        outContent.toString().contains(SERVICE.toString()));
    assertTrue("test simple append output service:\n" + outContent.toString(),
        outContent.toString().contains(SERVICE2.toString()));
  }
  /** Tests "remove"; the removed token must no longer be printed. */
  @Test
  public void testRemove() throws Exception {
    args = new String[] {"remove", "-alias", SERVICE.toString(), tokenFilename};
    rc = dt.run(args);
    assertEquals("test simple remove exit code", 0, rc);
    args = new String[] {"print", tokenFilename};
    rc = dt.run(args);
    assertEquals("test simple remove print exit code", 0, rc);
    assertFalse("test simple remove output kind:\n" + outContent.toString(),
        outContent.toString().contains(KIND.toString()));
    assertFalse("test simple remove output service:\n" + outContent.toString(),
        outContent.toString().contains(SERVICE.toString()));
  }
  /** Tests "get" via the mocked fetcher selected by the URL scheme. */
  @Test
  public void testGet() throws Exception {
    args = new String[] {"get", getUrl, tokenFilenameGet};
    rc = dt.run(args);
    assertEquals("test mocked get exit code", 0, rc);
    args = new String[] {"print", tokenFilenameGet};
    rc = dt.run(args);
    String oc = outContent.toString();
    assertEquals("test print after get exit code", 0, rc);
    assertTrue("test print after get output kind:\n" + oc,
        oc.contains(KIND_GET.toString()));
    assertTrue("test print after get output service:\n" + oc,
        oc.contains(SERVICE_GET.toString()));
  }
  /** Tests "get" with an explicit -service flag on a generic http URL. */
  @Test
  public void testGetWithServiceFlag() throws Exception {
    args = new String[] {"get", getUrl2, "-service", SERVICE_GET.toString(),
        tokenFilenameGet};
    rc = dt.run(args);
    assertEquals("test mocked get with service flag exit code", 0, rc);
    args = new String[] {"print", tokenFilenameGet};
    rc = dt.run(args);
    String oc = outContent.toString();
    assertEquals("test print after get with service flag exit code", 0, rc);
    assertTrue("test print after get with service flag output kind:\n" + oc,
        oc.contains(KIND_GET.toString()));
    assertTrue("test print after get with service flag output service:\n" + oc,
        oc.contains(SERVICE_GET.toString()));
  }
  /** Tests "get" with -alias: the alias replaces the fetched token's service. */
  @Test
  public void testGetWithAliasFlag() throws Exception {
    args = new String[] {"get", getUrl, "-alias", alias, tokenFilenameGet};
    rc = dt.run(args);
    assertEquals("test mocked get with alias flag exit code", 0, rc);
    args = new String[] {"print", tokenFilenameGet};
    rc = dt.run(args);
    String oc = outContent.toString();
    assertEquals("test print after get with alias flag exit code", 0, rc);
    assertTrue("test print after get with alias flag output kind:\n" + oc,
        oc.contains(KIND_GET.toString()));
    assertTrue("test print after get with alias flag output alias:\n" + oc,
        oc.contains(alias));
    assertFalse("test print after get with alias flag output old service:\n" +
        oc, oc.contains(SERVICE_GET.toString()));
  }
  /** Tests that "-format java" writes the writable (java-serialized) storage format. */
  @Test
  public void testFormatJavaFlag() throws Exception {
    args = new String[] {"get", getUrl, "-format", "java", tokenFilenameGet};
    rc = dt.run(args);
    assertEquals("test mocked get with java format flag exit code", 0, rc);
    Credentials creds = new Credentials();
    Credentials spyCreds = Mockito.spy(creds);
    DataInputStream in = new DataInputStream(
        new FileInputStream(tokenFilenameGet));
    // readFields (not readProto) must be invoked when reading a java-format file.
    spyCreds.readTokenStorageStream(in);
    Mockito.verify(spyCreds).readFields(in);
  }
  /** Tests that "-format protobuf" writes the protobuf storage format. */
  @Test
  public void testFormatProtoFlag() throws Exception {
    args = new String[] {
        "get", getUrl, "-format", "protobuf", tokenFilenameGet};
    rc = dt.run(args);
    assertEquals("test mocked get with protobuf format flag exit code", 0, rc);
    Credentials creds = new Credentials();
    Credentials spyCreds = Mockito.spy(creds);
    DataInputStream in = new DataInputStream(
        new FileInputStream(tokenFilenameGet));
    // readProto (not readFields) must be invoked when reading a protobuf-format file.
    spyCreds.readTokenStorageStream(in);
    Mockito.verify(spyCreds).readProto(in);
  }
}
| |
/*
* Copyright 2016 Wultra s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.getlime.push.client;
import com.google.common.io.BaseEncoding;
import com.wultra.core.rest.client.base.DefaultRestClient;
import com.wultra.core.rest.client.base.RestClient;
import com.wultra.core.rest.client.base.RestClientException;
import io.getlime.core.rest.model.base.entity.Error;
import io.getlime.core.rest.model.base.request.ObjectRequest;
import io.getlime.core.rest.model.base.response.ObjectResponse;
import io.getlime.core.rest.model.base.response.Response;
import io.getlime.push.model.base.PagedResponse;
import io.getlime.push.model.entity.ListOfUsers;
import io.getlime.push.model.entity.PushMessage;
import io.getlime.push.model.entity.PushMessageBody;
import io.getlime.push.model.entity.PushMessageSendResult;
import io.getlime.push.model.request.*;
import io.getlime.push.model.response.*;
import io.getlime.push.model.validator.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.List;
/**
* Simple class for interacting with the push server RESTful API.
*
* @author Petr Dvorak, petr@wultra.com
* @author Martin Tupy, martin.tupy.work@gmail.com
*/
public class PushServerClient {
/** Class logger. */
private static final Logger logger = LoggerFactory.getLogger(PushServerClient.class);
/** Underlying REST client used for all calls to the push server. */
private final RestClient restClient;
/**
 * Main constructor with the push server base URL.
 * @param serviceBaseUrl Push server instance base URL.
 * @throws PushServerClientException Thrown in case REST client initialization fails.
 */
public PushServerClient(String serviceBaseUrl) throws PushServerClientException {
    try {
        this.restClient = DefaultRestClient.builder().baseUrl(serviceBaseUrl).build();
    } catch (RestClientException ex) {
        // NOTE(review): the original cause 'ex' is dropped here, only its message is kept;
        // consider chaining the cause if PushServerClientException supports it - TODO confirm.
        throw new PushServerClientException("Rest client initialization failed, error: " + ex.getMessage());
    }
}
// Client calls
/**
 * Retrieves the service information from the push server.
 *
 * @return Status response of the push server service.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public ObjectResponse<ServiceStatusResponse> getServiceStatus() throws PushServerClientException {
    logger.info("Calling push server status service - start");
    final ObjectResponse<ServiceStatusResponse> statusResponse = getObjectImpl("/push/service/status", null, ServiceStatusResponse.class);
    logger.info("Calling push server status service - finish");
    return statusResponse;
}
/**
 * Register an anonymous device (one not bound to an activation) to the push server.
 *
 * @param appId PowerAuth application app ID.
 * @param token Token received from the push service provider (APNs, FCM).
 * @param platform Mobile platform (iOS, Android).
 * @return True if device registration was successful, false otherwise.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public boolean createDevice(Long appId, String token, MobilePlatform platform) throws PushServerClientException {
    // delegate to the full variant with no activation ID
    return createDevice(appId, token, platform, null);
}
/**
 * Register a device to the push server, optionally associated with a PowerAuth activation.
 *
 * @param appId PowerAuth application app ID.
 * @param token Token received from the push service provider (APNs, FCM).
 * @param platform Mobile platform (iOS, Android).
 * @param activationId PowerAuth activation ID, may be null for anonymous registration.
 * @return True if device registration was successful, false otherwise.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public boolean createDevice(Long appId, String token, MobilePlatform platform, String activationId) throws PushServerClientException {
    final CreateDeviceRequest createRequest = new CreateDeviceRequest();
    createRequest.setAppId(appId);
    createRequest.setToken(token);
    createRequest.setPlatform(platform.value());
    createRequest.setActivationId(activationId);
    // Client-side validation before any network call.
    final String validationError = CreateDeviceRequestValidator.validate(createRequest);
    if (validationError != null) {
        throw new PushServerClientException(validationError);
    }
    logger.info("Calling create device service, appId: {}, token: {}, platform: {} - start", appId, maskToken(token), platform.value());
    final Response serverResponse = postObjectImpl("/push/device/create", new ObjectRequest<>(createRequest));
    logger.info("Calling create device service, appId: {}, token: {}, platform: {} - finish", appId, maskToken(token), platform.value());
    return serverResponse.getStatus().equals(Response.Status.OK);
}
/**
 * Register a device to the push server, associated with multiple PowerAuth activations.
 *
 * @param appId PowerAuth application app ID.
 * @param token Token received from the push service provider (APNs, FCM).
 * @param platform Mobile platform (iOS, Android).
 * @param activationIds PowerAuth activation IDs.
 * @return True if device registration was successful, false otherwise.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public boolean createDeviceForActivations(Long appId, String token, MobilePlatform platform, List<String> activationIds) throws PushServerClientException {
    final CreateDeviceForActivationsRequest createRequest = new CreateDeviceForActivationsRequest();
    createRequest.setAppId(appId);
    createRequest.setToken(token);
    createRequest.setPlatform(platform.value());
    createRequest.getActivationIds().addAll(activationIds);
    // Client-side validation before any network call.
    final String validationError = CreateDeviceRequestValidator.validate(createRequest);
    if (validationError != null) {
        throw new PushServerClientException(validationError);
    }
    logger.info("Calling create device service, appId: {}, token: {}, platform: {} - start", appId, maskToken(token), platform.value());
    final Response serverResponse = postObjectImpl("/push/device/create/multi", new ObjectRequest<>(createRequest));
    logger.info("Calling create device service, appId: {}, token: {}, platform: {} - finish", appId, maskToken(token), platform.value());
    return serverResponse.getStatus().equals(Response.Status.OK);
}
/**
 * Remove a device registration from the push server.
 *
 * @param appId PowerAuth application app ID.
 * @param token Token received from the push service provider.
 * @return True if device removal was successful, false otherwise.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public boolean deleteDevice(Long appId, String token) throws PushServerClientException {
    final DeleteDeviceRequest deleteRequest = new DeleteDeviceRequest();
    deleteRequest.setAppId(appId);
    deleteRequest.setToken(token);
    // Client-side validation before any network call.
    final String validationError = DeleteDeviceRequestValidator.validate(deleteRequest);
    if (validationError != null) {
        throw new PushServerClientException(validationError);
    }
    logger.info("Calling push server delete device service, appId: {}, token: {} - start", appId, maskToken(token));
    final Response serverResponse = postObjectImpl("/push/device/delete", new ObjectRequest<>(deleteRequest));
    logger.info("Calling push server delete device service, appId: {}, token: {} - finish", appId, maskToken(token));
    return serverResponse.getStatus().equals(Response.Status.OK);
}
/**
 * Update activation status for the device registration bound to the given activation.
 *
 * @param activationId Identifier of activation.
 * @return True if the update succeeded, false otherwise.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public boolean updateDeviceStatus(String activationId) throws PushServerClientException {
    final UpdateDeviceStatusRequest statusRequest = new UpdateDeviceStatusRequest();
    statusRequest.setActivationId(activationId);
    // Client-side validation before any network call.
    final String validationError = UpdateDeviceStatusRequestValidator.validate(statusRequest);
    if (validationError != null) {
        throw new PushServerClientException(validationError);
    }
    logger.info("Calling push server update device status, activation ID: {} - start", activationId);
    // The body is the plain request, not wrapped in ObjectRequest, because this endpoint
    // uses the standard PowerAuth Server callback format.
    final Response serverResponse = postImpl("/push/device/status/update", statusRequest, new ParameterizedTypeReference<Response>(){});
    logger.info("Calling push server update device status, activation ID: {} - finish", activationId);
    return serverResponse.getStatus().equals(Response.Status.OK);
}
/**
 * Send a single push message to application with given ID.
 *
 * @param appId PowerAuth application app ID.
 * @param pushMessage Push message to be sent.
 * @return Send result response in case everything went OK.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public ObjectResponse<PushMessageSendResult> sendPushMessage(Long appId, PushMessage pushMessage) throws PushServerClientException {
    final SendPushMessageRequest sendRequest = new SendPushMessageRequest();
    sendRequest.setAppId(appId);
    sendRequest.setMessage(pushMessage);
    // Client-side validation before any network call.
    final String validationError = SendPushMessageRequestValidator.validate(sendRequest);
    if (validationError != null) {
        throw new PushServerClientException(validationError);
    }
    logger.info("Calling push server to send a push message, app ID: {}, user ID: {} - start", appId, pushMessage.getUserId());
    final ObjectResponse<PushMessageSendResult> sendResult = postObjectImpl("/push/message/send", new ObjectRequest<>(sendRequest), PushMessageSendResult.class);
    logger.info("Calling push server to send a push message, app ID: {}, user ID: {} - finish", appId, pushMessage.getUserId());
    return sendResult;
}
/**
 * Send a push message batch to application with given ID.
 *
 * @param appId PowerAuth application app ID.
 * @param batch Push message batch to be sent.
 * @return Send result response in case everything went OK.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public ObjectResponse<PushMessageSendResult> sendPushMessageBatch(Long appId, List<PushMessage> batch) throws PushServerClientException {
    final SendPushMessageBatchRequest batchRequest = new SendPushMessageBatchRequest();
    batchRequest.setAppId(appId);
    batchRequest.setBatch(batch);
    // Client-side validation before any network call.
    final String validationError = SendPushMessageBatchRequestValidator.validate(batchRequest);
    if (validationError != null) {
        throw new PushServerClientException(validationError);
    }
    logger.info("Calling push server to send a push message batch, app ID: {} - start", appId);
    final ObjectResponse<PushMessageSendResult> sendResult = postObjectImpl("/push/message/batch/send", new ObjectRequest<>(batchRequest), PushMessageSendResult.class);
    logger.info("Calling push server to send a push message batch, app ID: {} - finish", appId);
    return sendResult;
}
/**
 * Create a campaign.
 *
 * @param appId Application ID.
 * @param message Message whose attributes are defined in PushMessageBody.
 * @return Response carrying the ID of the newly created campaign.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public ObjectResponse<CreateCampaignResponse> createCampaign(Long appId, PushMessageBody message) throws PushServerClientException {
    final CreateCampaignRequest campaignRequest = new CreateCampaignRequest();
    campaignRequest.setAppId(appId);
    campaignRequest.setMessage(message);
    // Client-side validation before any network call.
    final String validationError = CreateCampaignRequestValidator.validate(campaignRequest);
    if (validationError != null) {
        throw new PushServerClientException(validationError);
    }
    logger.info("Calling push server to create a push campaign, app ID: {} - start", appId);
    final ObjectResponse<CreateCampaignResponse> campaignResult = postObjectImpl("/push/campaign/create", new ObjectRequest<>(campaignRequest), CreateCampaignResponse.class);
    logger.info("Calling push server to create a push campaign, app ID: {} - finish", appId);
    return campaignResult;
}
/**
 * Delete the campaign with the given identifier.
 *
 * @param campaignId Campaign ID.
 * @return True if the campaign was removed, false otherwise.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public boolean deleteCampaign(Long campaignId) throws PushServerClientException {
    try {
        final String campaignIdSanitized = URLEncoder.encode(String.valueOf(campaignId), "UTF-8");
        logger.info("Calling push server to delete a push campaign, campaign ID: {} - start", campaignId);
        final ObjectResponse<DeleteCampaignResponse> deleteResponse = postObjectImpl("/push/campaign/" + campaignIdSanitized + "/delete", null, DeleteCampaignResponse.class);
        logger.info("Calling push server to delete a push campaign, campaign ID: {} - finish", campaignId);
        return deleteResponse.getStatus().equals(Response.Status.OK);
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM, so this branch is effectively unreachable
        throw new PushServerClientException(new Error("PUSH_SERVER_CLIENT_ERROR", e.getMessage()));
    }
}
/**
 * Get the list of campaigns, depending on the <code>all</code> parameter.
 *
 * @param all true to get the whole list, false to get only campaigns that were sent
 * @return List of campaigns.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public ObjectResponse<ListOfCampaignsResponse> getListOfCampaigns(boolean all) throws PushServerClientException {
    final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<>();
    queryParams.put("all", Collections.singletonList(Boolean.toString(all)));
    logger.info("Calling push server to obtain a push campaign list - start");
    final ObjectResponse<ListOfCampaignsResponse> campaignList = getObjectImpl("/push/campaign/list", queryParams, ListOfCampaignsResponse.class);
    logger.info("Calling push server to obtain a push campaign list - finish");
    return campaignList;
}
/**
 * Get the detail of the campaign with the given identifier.
 *
 * @param campaignId ID of campaign to get.
 * @return Details of the campaign, as defined in CampaignResponse.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public ObjectResponse<CampaignResponse> getCampaign(Long campaignId) throws PushServerClientException {
    try {
        final String campaignIdSanitized = URLEncoder.encode(String.valueOf(campaignId), "UTF-8");
        logger.info("Calling push server to obtain a push campaign detail, campaign ID: {} - start", campaignId);
        final ObjectResponse<CampaignResponse> campaignDetail = getObjectImpl("/push/campaign/" + campaignIdSanitized + "/detail", null, CampaignResponse.class);
        logger.info("Calling push server to obtain a push campaign detail, campaign ID: {} - finish", campaignId);
        return campaignDetail;
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported by the JVM, so this branch is effectively unreachable
        throw new PushServerClientException(new Error("PUSH_SERVER_CLIENT_ERROR", e.getMessage()));
    }
}
/**
 * Add a list of users to a specific campaign.
 *
 * @param campaignId Identifier of campaign.
 * @param users List of users to add.
 * @return True if adding was successful, false otherwise.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public boolean addUsersToCampaign(Long campaignId, List<String> users) throws PushServerClientException {
    try {
        final ListOfUsers userList = new ListOfUsers(users);
        final String campaignIdSanitized = URLEncoder.encode(String.valueOf(campaignId), "UTF-8");
        logger.info("Calling push server to add users to campaign, campaign ID: {} - start", campaignId);
        final Response serverResponse = putObjectImpl("/push/campaign/" + campaignIdSanitized + "/user/add", new ObjectRequest<>(userList));
        logger.info("Calling push server to add users to campaign, campaign ID: {} - finish", campaignId);
        // a null response indicates the call never reached the server
        if (serverResponse == null) {
            throw new PushServerClientException(new Error("PUSH_SERVER_CLIENT_ERROR", "Network communication has failed."));
        }
        return serverResponse.getStatus().equals(Response.Status.OK);
    } catch (UnsupportedEncodingException e) {
        throw new PushServerClientException(new Error("PUSH_SERVER_CLIENT_ERROR", e.getMessage()));
    }
}
/**
 * Get a page of users enlisted in a specific campaign.
 *
 * @param campaignId Identifier of campaign.
 * @param page Page number.
 * @param size Number of elements per page.
 * @return Page of users specified by the parameters.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public PagedResponse<ListOfUsersFromCampaignResponse> getListOfUsersFromCampaign(Long campaignId, int page, int size) throws PushServerClientException {
    try {
        final String campaignIdSanitized = URLEncoder.encode(String.valueOf(campaignId), "UTF-8");
        final MultiValueMap<String, String> queryParams = new LinkedMultiValueMap<>();
        queryParams.put("page", Collections.singletonList(Integer.toString(page)));
        queryParams.put("size", Collections.singletonList(Integer.toString(size)));
        final ParameterizedTypeReference<PagedResponse<ListOfUsersFromCampaignResponse>> typeReference = new ParameterizedTypeReference<PagedResponse<ListOfUsersFromCampaignResponse>>() {};
        logger.info("Calling push server to get users from the campaign, campaign ID: {} - start", campaignId);
        final PagedResponse<ListOfUsersFromCampaignResponse> userPage = getImpl("/push/campaign/" + campaignIdSanitized + "/user/list", queryParams, typeReference);
        logger.info("Calling push server to get users from the campaign, campaign ID: {} - finish", campaignId);
        return userPage;
    } catch (UnsupportedEncodingException e) {
        throw new PushServerClientException(new Error("PUSH_SERVER_CLIENT_ERROR", e.getMessage()));
    }
}
/**
 * Delete a list of users from a specific campaign.
 *
 * @param campaignId Identifier of campaign.
 * @param users List of users' identifiers to delete.
 * @return True if deletion was successful, false otherwise.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public boolean deleteUsersFromCampaign(Long campaignId, List<String> users) throws PushServerClientException {
    try {
        final ListOfUsers userList = new ListOfUsers(users);
        final String campaignIdSanitized = URLEncoder.encode(String.valueOf(campaignId), "UTF-8");
        logger.info("Calling push server to remove users from the campaign, campaign ID: {} - start", campaignId);
        final Response serverResponse = postObjectImpl("/push/campaign/" + campaignIdSanitized + "/user/delete", new ObjectRequest<>(userList));
        logger.info("Calling push server to remove users from the campaign, campaign ID: {} - finish", campaignId);
        return serverResponse.getStatus().equals(Response.Status.OK);
    } catch (UnsupportedEncodingException e) {
        throw new PushServerClientException(new Error("PUSH_SERVER_CLIENT_ERROR", e.getMessage()));
    }
}
/**
 * Send a campaign to a single test user to verify its correctness.
 *
 * @param campaignId Identifier of campaign.
 * @param userId Identifier of test user.
 * @return True if sent, false otherwise.
 * @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
 */
public boolean sendTestCampaign(Long campaignId, String userId) throws PushServerClientException {
    try {
        final String campaignIdSanitized = URLEncoder.encode(String.valueOf(campaignId), "UTF-8");
        final TestCampaignRequest testRequest = new TestCampaignRequest();
        testRequest.setUserId(userId);
        // Client-side validation before any network call.
        final String validationError = TestCampaignRequestValidator.validate(testRequest);
        if (validationError != null) {
            throw new PushServerClientException(validationError);
        }
        logger.info("Calling push server to send test campaign, campaign ID: {}, user ID: {} - start", campaignId, userId);
        final Response serverResponse = postObjectImpl("/push/campaign/send/test/" + campaignIdSanitized, new ObjectRequest<>(testRequest));
        logger.info("Calling push server to send test campaign, campaign ID: {}, user ID: {} - finish", campaignId, userId);
        return serverResponse.getStatus().equals(Response.Status.OK);
    } catch (UnsupportedEncodingException e) {
        throw new PushServerClientException(new Error("PUSH_SERVER_CLIENT_ERROR", e.getMessage()));
    }
}
/**
* Send a specific campaign to users carrying this campaignID in PushCampaignUser model, but only once per device identified by token.
*
* @param campaignId Identifier of campaign.
* @return True if sent, else otherwise.
* @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
*/
public boolean sendCampaign(Long campaignId) throws PushServerClientException {
    try {
        // Campaign ID is URL-encoded because it becomes part of the request path.
        final String encodedCampaignId = URLEncoder.encode(String.valueOf(campaignId), "UTF-8");
        logger.info("Calling push server to send a production campaign, campaign ID: {} - start", campaignId);
        final Response serverResponse = postObjectImpl("/push/campaign/send/live/" + encodedCampaignId, null);
        logger.info("Calling push server to send a production campaign, campaign ID: {} - finish", campaignId);
        return serverResponse.getStatus().equals(Response.Status.OK);
    } catch (UnsupportedEncodingException e) {
        // UTF-8 is always supported; kept to satisfy the checked exception.
        throw new PushServerClientException(new Error("PUSH_SERVER_CLIENT_ERROR", e.getMessage()));
    }
}
/**
* Get list of application credentials entities.
* @return Application credentials entity list.
* @throws PushServerClientException Thrown when communication with Push Server fails.
*/
public ObjectResponse<GetApplicationListResponse> getApplicationList() throws PushServerClientException {
    logger.info("Calling push server to retrieve list of applications - start");
    // Simple GET with no query parameters.
    ObjectResponse<GetApplicationListResponse> applicationList = getObjectImpl("/admin/app/list", null, GetApplicationListResponse.class);
    logger.info("Calling push server to retrieve list of applications - finish");
    return applicationList;
}
/**
* Get list of applications which are not yet configured in Push Server but exist in PowerAuth server.
* @return List of applications which are not configured.
* @throws PushServerClientException Thrown when communication with Push Server fails.
*/
public ObjectResponse<GetApplicationListResponse> getUnconfiguredApplicationList() throws PushServerClientException {
    logger.info("Calling push server to retrieve list of unconfigured applications - start");
    // Applications known to PowerAuth server but missing Push Server configuration.
    ObjectResponse<GetApplicationListResponse> unconfiguredList = getObjectImpl("/admin/app/unconfigured/list", null, GetApplicationListResponse.class);
    logger.info("Calling push server to retrieve list of unconfigured applications - finish");
    return unconfiguredList;
}
/**
* Get detail for an application credentials entity.
* @param id Application credentials entity ID.
* @param includeIos Whether to include iOS details.
* @param includeAndroid Whether to include Android details.
* @return Application credentials entity detail.
* @throws PushServerClientException Thrown when communication with Push Server fails.
*/
public ObjectResponse<GetApplicationDetailResponse> getApplicationDetail(Long id, boolean includeIos, boolean includeAndroid) throws PushServerClientException {
    logger.info("Calling push server to retrieve application detail, ID: {} - start", id);
    // Wrap the query parameters in the standard request envelope.
    final ObjectRequest<GetApplicationDetailRequest> detailRequest =
            new ObjectRequest<>(new GetApplicationDetailRequest(id, includeIos, includeAndroid));
    final ObjectResponse<GetApplicationDetailResponse> detail =
            postObjectImpl("/admin/app/detail", detailRequest, GetApplicationDetailResponse.class);
    logger.info("Calling push server to retrieve application detail, ID: {} - finish", id);
    return detail;
}
/**
* Create application credentials entity.
* @param appId PowerAuth application ID.
* @return Response with ID of created application credentials entity.
* @throws PushServerClientException Thrown when communication with Push Server fails.
*/
public ObjectResponse<CreateApplicationResponse> createApplication(Long appId) throws PushServerClientException {
    logger.info("Calling push server to create application, app ID: {} - start", appId);
    // Register the PowerAuth application with the Push Server.
    final ObjectResponse<CreateApplicationResponse> created = postObjectImpl(
            "/admin/app/create",
            new ObjectRequest<>(new CreateApplicationRequest(appId)),
            CreateApplicationResponse.class);
    logger.info("Calling push server to create application, app ID: {} - finish", appId);
    return created;
}
/**
* Update iOS details for an application credentials entity.
* @param id ID of application credentials entity.
* @param bundle The iOS bundle record.
* @param keyId The iOS key record.
* @param teamId The iOS team ID record.
* @param privateKey The iOS private key bytes.
* @return Response from server.
* @throws PushServerClientException Thrown when communication with Push Server fails.
*/
public Response updateIos(Long id, String bundle, String keyId, String teamId, byte[] privateKey) throws PushServerClientException {
    // Use the JDK's Base64 encoder instead of Guava's BaseEncoding.base64();
    // both emit standard padded Base64 with no line wrapping, so the wire
    // format sent to the server is unchanged.
    final String privateKeyBase64 = java.util.Base64.getEncoder().encodeToString(privateKey);
    final UpdateIosRequest request = new UpdateIosRequest(id, bundle, keyId, teamId, privateKeyBase64);
    logger.info("Calling push server to update iOS, ID: {} - start", id);
    final Response response = putObjectImpl("/admin/app/ios/update", new ObjectRequest<>(request));
    logger.info("Calling push server to update iOS, ID: {} - finish", id);
    return response;
}
/**
* Remove iOS record from an application credentials entity.
* @param id Application credentials entity ID.
* @return Response from server.
* @throws PushServerClientException Thrown when communication with Push Server fails.
*/
public Response removeIos(Long id) throws PushServerClientException {
    logger.info("Calling push server to remove iOS, ID: {} - start", id);
    // Deletes only the iOS credentials; the application entity itself remains.
    final Response serverResponse = postObjectImpl("/admin/app/ios/remove", new ObjectRequest<>(new RemoveIosRequest(id)));
    logger.info("Calling push server to remove iOS, ID: {} - finish", id);
    return serverResponse;
}
/**
* Update Android details for an application credentials entity.
* @param id Application credentials entity ID.
* @param projectId The Android project ID record.
* @param privateKey The Android private key bytes.
* @return Response from server.
* @throws PushServerClientException Thrown when communication with Push Server fails.
*/
public Response updateAndroid(Long id, String projectId, byte[] privateKey) throws PushServerClientException {
    // Use the JDK's Base64 encoder instead of Guava's BaseEncoding.base64();
    // output is identical (standard padded Base64), so server-side decoding
    // is unaffected.
    final String privateKeyBase64 = java.util.Base64.getEncoder().encodeToString(privateKey);
    final UpdateAndroidRequest request = new UpdateAndroidRequest(id, projectId, privateKeyBase64);
    logger.info("Calling push server to update android, ID: {} - start", id);
    final Response response = putObjectImpl("/admin/app/android/update", new ObjectRequest<>(request));
    logger.info("Calling push server to update android, ID: {} - finish", id);
    return response;
}
/**
* Remove Android record from an application credentials entity.
* @param id Application credentials entity ID.
* @return Response from server.
* @throws PushServerClientException Thrown when communication with Push Server fails.
*/
public Response removeAndroid(Long id) throws PushServerClientException {
    logger.info("Calling push server to remove android, ID: {} - start", id);
    // Deletes only the Android credentials; the application entity itself remains.
    final Response serverResponse = postObjectImpl("/admin/app/android/remove", new ObjectRequest<>(new RemoveAndroidRequest(id)));
    logger.info("Calling push server to remove android, ID: {} - finish", id);
    return serverResponse;
}
// Generic HTTP client methods
/**
* Prepare GET response.
*
* @param url specific url of method.
* @param params params to pass to url path, optional.
* @param typeReference response type reference.
* @return Object obtained after processing the response JSON.
* @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
*
*/
private <T> T getImpl(String url, MultiValueMap<String, String> params, ParameterizedTypeReference<T> typeReference) throws PushServerClientException {
    try {
        // Only the response body is of interest; headers/status are handled by the REST client.
        return restClient.get(url, params, null, typeReference).getBody();
    } catch (RestClientException ex) {
        logger.warn(ex.getMessage(), ex);
        // Wrap transport-level failures in the client's own exception type.
        throw new PushServerClientException(ex, new Error("PUSH_SERVER_CLIENT_ERROR", "HTTP GET request failed."));
    }
}
/**
* Prepare GET object response.
*
* @param url specific url of method.
* @param params params to pass to url path, optional.
* @param responseType response type.
* @return Object obtained after processing the response JSON.
* @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
*
*/
private <T> ObjectResponse<T> getObjectImpl(String url, MultiValueMap<String, String> params, Class<T> responseType) throws PushServerClientException {
    try {
        // Delegates deserialization of the ObjectResponse envelope to the REST client.
        return restClient.getObject(url, params, null, responseType);
    } catch (RestClientException ex) {
        logger.warn(ex.getMessage(), ex);
        // Wrap transport-level failures in the client's own exception type.
        throw new PushServerClientException(ex, new Error("PUSH_SERVER_CLIENT_ERROR", "HTTP GET request failed."));
    }
}
/**
* Prepare a generic POST response.
*
* @param url specific url of method
* @param request request body
* @param typeReference type reference
* @return Object obtained after processing the response JSON.
* @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
*/
private <T> T postImpl(String url, Object request, ParameterizedTypeReference<T> typeReference) throws PushServerClientException {
    try {
        // Only the response body is of interest; headers/status are handled by the REST client.
        return restClient.post(url, request, typeReference).getBody();
    } catch (RestClientException ex) {
        logger.warn(ex.getMessage(), ex);
        // Wrap transport-level failures in the client's own exception type.
        throw new PushServerClientException(ex, new Error("PUSH_SERVER_CLIENT_ERROR", "HTTP POST request failed."));
    }
}
/**
* Prepare POST object response. Uses default {@link Response} type reference for response.
*
* @param url specific url of method
* @param request request body
* @return Object obtained after processing the response JSON.
* @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
*/
private Response postObjectImpl(String url, ObjectRequest<?> request) throws PushServerClientException {
    try {
        // Uses the REST client's default Response type for the reply.
        return restClient.postObject(url, request);
    } catch (RestClientException ex) {
        logger.warn(ex.getMessage(), ex);
        // Wrap transport-level failures in the client's own exception type.
        throw new PushServerClientException(ex, new Error("PUSH_SERVER_CLIENT_ERROR", "HTTP POST request failed."));
    }
}
/**
* Prepare POST object response.
*
* @param url specific url of method
* @param request request body
* @param responseType response type
* @return Object obtained after processing the response JSON.
* @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
*/
private <T> ObjectResponse<T> postObjectImpl(String url, ObjectRequest<?> request, Class<T> responseType) throws PushServerClientException {
    try {
        // Typed variant: deserializes the ObjectResponse envelope into responseType.
        return restClient.postObject(url, request, responseType);
    } catch (RestClientException ex) {
        logger.warn(ex.getMessage(), ex);
        // Wrap transport-level failures in the client's own exception type.
        throw new PushServerClientException(ex, new Error("PUSH_SERVER_CLIENT_ERROR", "HTTP POST request failed."));
    }
}
/**
* Prepare PUT object response. Uses default {@link Response} type reference for response.
*
* @param url specific url of method
* @param request request body
* @return Object obtained after processing the response JSON.
* @throws PushServerClientException In case of network, response / JSON processing, or other IO error.
*/
private Response putObjectImpl(String url, ObjectRequest<?> request) throws PushServerClientException {
    try {
        return restClient.putObject(url, request);
    } catch (RestClientException ex) {
        logger.warn(ex.getMessage(), ex);
        // Fixed copy-paste bug: the error message previously said
        // "HTTP POST request failed." even though this method performs a PUT.
        throw new PushServerClientException(ex, new Error("PUSH_SERVER_CLIENT_ERROR", "HTTP PUT request failed."));
    }
}
/**
* Mask push service token to avoid leaking tokens in log files.
* @param token Push service token.
* @return Masked push service token.
*/
private String maskToken(String token) {
    // Keep only the first 10 characters; tokens too short to mask meaningfully
    // (or null) are returned unchanged.
    if (token != null && token.length() >= 10) {
        return token.substring(0, 10) + "...";
    }
    return token;
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.gradle.util;
import com.intellij.codeInsight.AttachSourcesProvider;
import com.intellij.execution.executors.DefaultRunExecutor;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.externalSystem.model.execution.ExternalSystemTaskExecutionSettings;
import com.intellij.openapi.externalSystem.model.project.LibraryData;
import com.intellij.openapi.externalSystem.model.project.LibraryPathType;
import com.intellij.openapi.externalSystem.service.execution.ProgressExecutionMode;
import com.intellij.openapi.externalSystem.service.notification.ExternalSystemNotificationManager;
import com.intellij.openapi.externalSystem.service.notification.NotificationCategory;
import com.intellij.openapi.externalSystem.service.notification.NotificationData;
import com.intellij.openapi.externalSystem.service.notification.NotificationSource;
import com.intellij.openapi.externalSystem.task.TaskCallback;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.externalSystem.util.ExternalSystemUtil;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.LibraryOrderEntry;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.UserDataHolderBase;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.gradle.execution.build.CachedModuleDataFinder;
import org.jetbrains.plugins.gradle.execution.target.GradleTargetUtil;
import org.jetbrains.plugins.gradle.service.GradleInstallationManager;
import org.jetbrains.plugins.gradle.service.execution.BuildLayoutParameters;
import org.jetbrains.plugins.gradle.service.project.GradleProjectResolverUtil;
import org.jetbrains.plugins.gradle.service.task.GradleTaskManager;
import org.jetbrains.plugins.gradle.settings.GradleSettings;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.function.Predicate;
import static com.intellij.jarFinder.InternetAttachSourceProvider.attachSourceJar;
import static org.jetbrains.plugins.gradle.service.project.GradleProjectResolverUtil.attachSourcesAndJavadocFromGradleCacheIfNeeded;
/**
* @author Vladislav.Soroka
*/
public class GradleAttachSourcesProvider implements AttachSourcesProvider {
  @NotNull
  @Override
  public Collection<AttachSourcesAction> getActions(List<LibraryOrderEntry> orderEntries, PsiFile psiFile) {
    // Only offer the action when at least one entry belongs to a Gradle-aware module.
    Map<LibraryOrderEntry, Module> gradleModules = getGradleModules(orderEntries);
    if (gradleModules.isEmpty()) return Collections.emptyList();
    return Collections.singleton(new AttachSourcesAction() {
      @Override
      public String getName() {
        return GradleBundle.message("gradle.action.download.sources");
      }
      @Override
      public String getBusyText() {
        return GradleBundle.message("gradle.action.download.sources.busy.text");
      }
      @Override
      public ActionCallback perform(List<LibraryOrderEntry> orderEntries) {
        // Downloads the "-sources" artifact by injecting a Gradle init script that
        // creates and runs a DownloadSources task, then attaches the resulting jar.
        Map<LibraryOrderEntry, Module> gradleModules = getGradleModules(orderEntries);
        if (gradleModules.isEmpty()) return ActionCallback.REJECTED;
        final ActionCallback resultWrapper = new ActionCallback();
        Project project = psiFile.getProject();
        // Only the first Gradle module/entry pair is used to drive the download.
        Map.Entry<LibraryOrderEntry, Module> next = gradleModules.entrySet().iterator().next();
        LibraryOrderEntry libraryOrderEntry = next.getKey();
        Module module = next.getValue();
        String libraryName = libraryOrderEntry.getLibraryName();
        if (libraryName == null) return ActionCallback.REJECTED;
        // Library names look like "Gradle: group:name:version"; strip the prefix.
        // If nothing was stripped, the library is not Gradle-managed.
        String artifactCoordinates = StringUtil.trimStart(libraryName, GradleConstants.SYSTEM_ID.getReadableName() + ": ");
        if (StringUtil.equals(libraryName, artifactCoordinates)) return ActionCallback.REJECTED;
        GradleModuleData gradleModuleData = CachedModuleDataFinder.getGradleModuleData(module);
        if (gradleModuleData == null) return ActionCallback.REJECTED;
        final String gradlePath = gradleModuleData.getGradlePath();
        // Resolve the "group:name:version:sources" notation; the predicate checks
        // whether a coordinate segment matches the library's classes-root file names.
        String sourceArtifactNotation = getSourcesArtifactNotation(artifactIdCandidate -> {
          VirtualFile[] rootFiles = libraryOrderEntry.getRootFiles(OrderRootType.CLASSES);
          return rootFiles.length == 0 || ContainerUtil.exists(rootFiles, file -> file.getName().startsWith(artifactIdCandidate));
        }, artifactCoordinates);
        // The init script writes the downloaded sources jar path into this temp file;
        // a shutdown hook guarantees cleanup even if the task never completes.
        final String sourcesLocationFilePath;
        final File sourcesLocationFile;
        try {
          sourcesLocationFile = new File(FileUtil.createTempDirectory("sources", "loc"), "path.tmp");
          // Escape backslashes so the path survives embedding in the Groovy script below.
          sourcesLocationFilePath = StringUtil.escapeBackSlashes(sourcesLocationFile.getCanonicalPath());
          Runtime.getRuntime().addShutdownHook(new Thread(() -> FileUtil.delete(sourcesLocationFile), "GradleAttachSourcesProvider cleanup"));
        }
        catch (IOException e) {
          GradleLog.LOG.warn(e);
          return ActionCallback.REJECTED;
        }
        final String taskName = "DownloadSources";
        // Groovy init script: for the target project, register a DownloadSources task
        // that tries each repository in turn (isolating one repository at a time) to
        // resolve the sources artifact, and writes the resolved jar path to the temp file.
        // @formatter:off
        String initScript = "allprojects {\n" +
                            "  afterEvaluate { project ->\n" +
                            "    if(project.path == '" + gradlePath + "') {\n" +
                            "      def overwrite = project.tasks.findByName('" + taskName + "') != null\n" +
                            "      project.tasks.create(name: '" + taskName + "', overwrite: overwrite) {\n" +
                            "        doLast {\n" +
                            "          def configuration = null\n" +
                            "          def repository = project.repositories.toList().find {\n" +
                            "            logger.lifecycle('Attempt to download sources from ' + it.name)\n" +
                            "            project.repositories.clear()\n" +
                            "            project.repositories.add(it)\n" +
                            "            configuration = project.configurations.create('downloadSourcesFrom_' + UUID.randomUUID())\n" +
                            "            configuration.transitive = false\n" +
                            "            project.dependencies.add(configuration.name, '" + sourceArtifactNotation + "')\n" +
                            "            def files = null\n" +
                            "            try {\n" +
                            "              files = configuration.resolvedConfiguration.lenientConfiguration.getFiles()\n" +
                            "            } catch (java.lang.Throwable ignore) { }\n" +
                            "            return files && !files.isEmpty()\n" +
                            "          }\n" +
                            "          if (!repository) {\n" +
                            "            configuration = project.configurations.create('downloadSources_' + UUID.randomUUID())\n" +
                            "            configuration.transitive = false\n" +
                            "            project.dependencies.add(configuration.name, '" + sourceArtifactNotation + "')\n" +
                            "            configuration.resolve()\n" +
                            "          }\n" +
                            "          def sourcesPath = configuration?.singleFile?.path\n" +
                            "          if (sourcesPath) {\n" +
                            "            logger.lifecycle('Sources were downloaded to ' + sourcesPath)\n" +
                            "            new File('" + sourcesLocationFilePath + "').write sourcesPath\n" +
                            "          } else throw new RuntimeException('Sources download failed')\n" +
                            "        }\n" +
                            "      }\n" +
                            "    }\n" +
                            "  }\n" +
                            "}\n";
        // @formatter:on
        UserDataHolderBase userData = new UserDataHolderBase();
        userData.putUserData(GradleTaskManager.INIT_SCRIPT_KEY, initScript);
        String gradleVmOptions = GradleSettings.getInstance(project).getGradleVmOptions();
        ExternalSystemTaskExecutionSettings settings = new ExternalSystemTaskExecutionSettings();
        settings.setExecutionName(getName());
        settings.setExternalProjectPath(gradleModuleData.getDirectoryToRunTask());
        settings.setTaskNames(Collections.singletonList(gradleModuleData.getTaskPath(taskName, true)));
        settings.setVmOptions(gradleVmOptions);
        settings.setExternalSystemIdString(GradleConstants.SYSTEM_ID.getId());
        ExternalSystemUtil.runTask(
          settings, DefaultRunExecutor.EXECUTOR_ID, project, GradleConstants.SYSTEM_ID,
          new TaskCallback() {
            @Override
            public void onSuccess() {
              // Prefer a sources jar already present in the Gradle cache; fall back
              // to the path the init script wrote into the temp file.
              // NOTE(review): getFiles(...)[0] assumes at least one classes root —
              // confirm this cannot be empty here.
              VirtualFile classesFile = libraryOrderEntry.getFiles(OrderRootType.CLASSES)[0];
              File sourceJar = getSourceFile(artifactCoordinates, classesFile, project, settings.getExternalProjectPath());
              if (sourceJar == null) {
                try {
                  sourceJar = new File(FileUtil.loadFile(sourcesLocationFile));
                  FileUtil.delete(sourcesLocationFile);
                }
                catch (IOException e) {
                  GradleLog.LOG.warn(e);
                }
              }
              File finalSourceJar = sourceJar;
              // Library roots must be modified on the EDT.
              ApplicationManager.getApplication().invokeLater(() -> {
                final Set<Library> libraries = new HashSet<>();
                for (LibraryOrderEntry orderEntry : orderEntries) {
                  ContainerUtil.addIfNotNull(libraries, orderEntry.getLibrary());
                }
                if (finalSourceJar != null) {
                  attachSourceJar(finalSourceJar, libraries);
                }
                resultWrapper.setDone();
              });
            }
            @Override
            public void onFailure() {
              // Reject the callback and surface a balloon notification to the user.
              resultWrapper.setRejected();
              String title = GradleBundle.message("gradle.notifications.sources.download.failed.title");
              String message = GradleBundle.message("gradle.notifications.sources.download.failed.content", artifactCoordinates);
              NotificationData notification =
                new NotificationData(title, message, NotificationCategory.WARNING, NotificationSource.PROJECT_SYNC);
              notification.setBalloonNotification(true);
              ExternalSystemNotificationManager.getInstance(project).showNotification(GradleConstants.SYSTEM_ID, notification);
            }
          }, ProgressExecutionMode.IN_BACKGROUND_ASYNC, false, userData);
        return resultWrapper;
      }
    });
  }
  /**
   * Derives the Maven-style "group:name:version:sources" notation from library
   * coordinates that may carry extra packaging/classifier segments.
   */
  @NotNull
  @ApiStatus.Internal
  static String getSourcesArtifactNotation(@NotNull Predicate<String> artifactIdChecker, String artifactCoordinates) {
    String groupNameVersionCoordinates;
    String[] split = artifactCoordinates.split(":");
    if (split.length == 4) {
      // group:name:packaging:classifier || name:packaging:classifier:version || group:name:classifier:version || group:name:packaging:version
      boolean isArtifactId = artifactIdChecker.test(split[1]);
      groupNameVersionCoordinates = isArtifactId ? split[0] + ":" + split[1] + ":" + split[3] : artifactCoordinates;
    }
    else if (split.length == 5) {
      // group:name:packaging:classifier:version
      groupNameVersionCoordinates = split[0] + ":" + split[1] + ":" + split[4];
    }
    else {
      groupNameVersionCoordinates = artifactCoordinates;
    }
    return groupNameVersionCoordinates + ":sources";
  }
  /**
   * Looks for an already-downloaded sources jar in the Gradle user-home cache.
   * Returns the first matching source path, or {@code null} if none was found
   * (or the Gradle user home could not be determined).
   */
  @Nullable
  private static File getSourceFile(@NotNull String artifactCoordinates,
                                    VirtualFile classesFile,
                                    @NotNull Project project,
                                    @NotNull @NlsSafe String projectPath) {
    LibraryData data = new LibraryData(GradleConstants.SYSTEM_ID, artifactCoordinates);
    data.addPath(LibraryPathType.BINARY, VfsUtil.getLocalFile(classesFile).getPath());
    BuildLayoutParameters buildLayoutParameters = GradleInstallationManager.getInstance().guessBuildLayoutParameters(project, projectPath);
    String gradleUserHome = GradleTargetUtil.maybeGetLocalValue(buildLayoutParameters.getGradleUserHome());
    if (gradleUserHome == null) return null;
    attachSourcesAndJavadocFromGradleCacheIfNeeded(new File(gradleUserHome), data);
    Iterator<String> iterator = data.getPaths(LibraryPathType.SOURCE).iterator();
    return iterator.hasNext() ? new File(iterator.next()) : null;
  }
  /**
   * Maps each project-level library entry to its owner module, keeping only
   * modules managed by the Gradle external system. Module-level entries are skipped.
   */
  private static Map<LibraryOrderEntry, Module> getGradleModules(List<LibraryOrderEntry> libraryOrderEntries) {
    Map<LibraryOrderEntry, Module> result = new HashMap<>();
    for (LibraryOrderEntry entry : libraryOrderEntries) {
      if (entry.isModuleLevel()) continue;
      Module module = entry.getOwnerModule();
      if (ExternalSystemApiUtil.isExternalSystemAwareModule(GradleConstants.SYSTEM_ID, module)) {
        result.put(entry, module);
      }
    }
    return result;
  }
}
| |
package com.evanmclean.erudite.instapaper;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jsoup.Connection;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.parser.Tag;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.evanmclean.erudite.config.TitleMunger;
import com.evanmclean.erudite.misc.Conn;
import com.evanmclean.erudite.sessions.Session;
import com.evanmclean.evlib.escape.Esc;
import com.evanmclean.evlib.lang.Str;
import com.evanmclean.evlib.util.Colls;
import com.evanmclean.evlib.util.TreeMapIgnoreCase;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
/**
* Represents a login to the Instapaper service.
*
* @author Evan M<sup>c</sup>Lean,
* <a href="http://evanmclean.com/" target="_blank">M<sup>c</sup>Lean
* Computer Services</a>
*/
public class Instapaper
{
// One article scraped from an Instapaper folder page. Holds the action URLs
// (archive/move/delete) parsed from the page and lazily caches the article text.
private class Article implements InstapaperArticle
{
  private final String title;
  private final String originalUrl;
  private final String summary;
  private final String textUrl;
  private final String archiveUrl;
  private final String moveUrl;
  private final String deleteUrl;
  // Lazily-loaded article body; populated on first call to text().
  private Element _text;
  Article( final String title, final String original_url,
      final String summary, final String text_url, final String archive_url,
      final String move_url, final String delete_url )
  {
    this.title = title;
    this.originalUrl = original_url;
    this.summary = summary;
    this.textUrl = text_url;
    this.archiveUrl = archive_url;
    this.moveUrl = move_url;
    this.deleteUrl = delete_url;
  }
  @SuppressWarnings( "synthetic-access" )
  @Override
  public void archive() throws IOException
  {
    // Fires the archive URL scraped from the folder page.
    LoggerFactory.getLogger(getClass())
        .trace("Archiving article on Instapaper: {}", title);
    connect(archiveUrl).execute();
  }
  @Override
  public String getOriginalUrl()
  {
    return originalUrl;
  }
  @Override
  public String getSourceUrl()
  {
    // Instapaper's text-view endpoint for the original URL.
    return "https://www.instapaper.com/text?u=" + Esc.url.text(originalUrl);
  }
  @Override
  public String getSummary()
  {
    return summary;
  }
  @Override
  public String getTitle()
  {
    return title;
  }
  @SuppressWarnings( "synthetic-access" )
  @Override
  public void move( final InstapaperFolder folder ) throws IOException
  {
    LoggerFactory.getLogger(getClass()).trace(
        "Moving article on Instapaper to {} folder: {}", folder.getName(),
        title);
    // moveUrl already ends with ".../to/" so appending the folder ID completes it.
    final Folder fldr = (Folder) folder;
    connect(moveUrl + fldr.getId()).execute();
  }
  @SuppressWarnings( "synthetic-access" )
  @Override
  public void remove() throws IOException
  {
    // NOTE(review): delete uses POST while archive/move use the default method —
    // presumably required by the Instapaper endpoint; confirm before changing.
    LoggerFactory.getLogger(getClass())
        .trace("Removing article on Instapaper: {}", title);
    connect(deleteUrl).method(Connection.Method.POST).execute();
  }
  @SuppressWarnings( "synthetic-access" )
  @Override
  public Element text() throws IOException
  {
    // Fetch and cache the article body (div#story) on first access;
    // subsequent calls return a clone of the cached element.
    Element text = _text;
    if ( text == null )
    {
      LoggerFactory.getLogger(getClass())
          .trace("Retrieving article from Instapaper: {}", title);
      final Document doc = connect(textUrl).get();
      final Element story = doc.getElementById("story");
      if ( story == null )
        throw new HasInstapaperLayoutChangedException(
            "Could not find div#story for article: " + title);
      final List<Node> contents = story.childNodes();
      switch ( contents.size() )
      {
        case 0:
          throw new HasInstapaperLayoutChangedException(
              "Empty div#story for article: " + title);
        case 1:
        {
          // Single element child: use it directly. A lone non-element node
          // falls through to the wrapping case below.
          Node node = contents.get(0);
          if ( node instanceof Element )
          {
            text = (Element) node;
            break;
          }
        }
        //$FALL-THROUGH$
        default:
        {
          // Multiple children: wrap them all in a fresh <div>.
          text = new Element(Tag.valueOf("div"), Str.EMPTY);
          // (Use defensive copy to avoid a ConcurrentModificationException)
          for ( Node node : ImmutableList.copyOf(contents) )
            text.appendChild(node);
        }
      }
      _text = text;
    }
    // Clone so callers cannot mutate the cached copy.
    return text.clone();
  }
  @Override
  public String toString()
  {
    final StringBuilder buff = new StringBuilder(title);
    buff.append("\n  ").append(originalUrl) //
        .append("\n  Text: ").append(textUrl) //
        .append("\n  Archive: ").append(archiveUrl) //
        .append("\n  Move: ").append(moveUrl) //
        .append("\n  Delete: ").append(deleteUrl) //
    ;
    if ( Str.isNotEmpty(summary) )
      buff.append("\n\n  ").append(summary);
    return buff.toString();
  }
}
// One Instapaper folder. Scrapes its article list from the folder's HTML page
// and caches the result for the lifetime of this object.
private class Folder implements InstapaperFolder
{
  private final String name;
  private final String url;
  private final String id;
  // Lazily-loaded article list; populated on first call to getArticles().
  private ImmutableList<InstapaperArticle> _articles;
  Folder( final String name, final String url, final String id )
  {
    this.name = name;
    this.url = url;
    this.id = id;
  }
  @SuppressWarnings( "synthetic-access" )
  @Override
  public ImmutableList<InstapaperArticle> getArticles() throws IOException
  {
    // Fetch the folder page and parse its articles on first access only.
    ImmutableList<InstapaperArticle> articles = _articles;
    if ( articles == null )
      _articles = articles = _saveArticles(connect(url).get());
    return articles;
  }
  @Override
  public String getName()
  {
    return name;
  }
  @Override
  public String toString()
  {
    return "InstapaperFolder(" + name + ", " + id + ", " + url + ')';
  }
  // Parses every div.article_item in the folder page into an Article.
  // Items missing a title or source link are silently skipped; missing action
  // URLs raise HasInstapaperLayoutChangedException since they indicate a
  // site-layout change rather than a malformed item.
  @SuppressWarnings( "synthetic-access" )
  ImmutableList<InstapaperArticle> _saveArticles( final Document doc )
  {
    final Logger log = LoggerFactory.getLogger(getClass());
    log.trace("Retrieving article list from Instapaper for folder {}.", name);
    final ImmutableList.Builder<InstapaperArticle> bldr = ImmutableList
        .builder();
    for ( Element article : doc.getElementsByClass("article_item") )
    {
      // Title and text URL come from the a.article_title link.
      final String title;
      final String text_url;
      {
        final Element link = article.getElementsByClass("article_title")
            .first();
        if ( (link == null) || (!"a".equalsIgnoreCase(link.tagName())) )
          continue;
        {
          title = titleMunger.munge(link.text());
          final String href = link.attr("href");
          if ( Str.isEmpty(title) || Str.isEmpty(href) )
            continue;
          text_url = BASE_URL + href;
        }
      }
      // Original source URL comes from the first link inside the .host element.
      final String source_url;
      {
        final Element container = article.getElementsByClass("host").first();
        if ( container == null )
          continue;
        {
          final Element link = container.getElementsByTag("a").first();
          source_url = link.attr("href");
          if ( Str.isEmpty(source_url) )
            continue;
        }
      }
      // Optional preview text; empty string when absent.
      final String summary;
      {
        final Element smry = article.getElementsByClass("article_preview")
            .first();
        summary = (smry == null) ? Str.EMPTY : Str.trimToEmpty(smry.text());
      }
      // Action URLs (archive/delete) come from links in div.article_actions.
      final String archive_url;
      final String delete_url;
      final String move_url;
      {
        final Element container = article
            .getElementsByClass("article_actions").first();
        if ( container == null )
          throw new HasInstapaperLayoutChangedException(
              "Cannot find div.article_actions for article: " + title);
        String au = null;
        String du = null;
        for ( Element link : container.getElementsByTag("a") )
        {
          if ( link.hasClass("js_archive_single") )
            au = link.attr("href");
          else if ( link.hasClass("js_delete_single") )
            du = link.attr("href");
        }
        if ( Str.isEmpty(au) )
          throw new HasInstapaperLayoutChangedException(
              "Cannot find archive url for article: " + title);
        // (The explicit null check is redundant with Str.isEmpty but keeps
        // static analysers happy about the replace() call below.)
        if ( Str.isEmpty(du) || (du == null) )
          throw new HasInstapaperLayoutChangedException(
              "Cannot find delete url for article: " + title);
        archive_url = BASE_URL + au;
        delete_url = BASE_URL + du;
        // This is a bit of a kludge: We just generate the fragment of the
        // URL for moving to a folder.
        move_url = BASE_URL + du.replace("delete", "move") + "/to/";
      }
      log.trace("Article: {}", title);
      bldr.add(new Article(title, source_url, summary, text_url, archive_url,
          move_url, delete_url));
    }
    final ImmutableList<InstapaperArticle> list = bldr.build();
    return list;
  }
  String getId()
  {
    return id;
  }
}
/**
* The name of the default (Read Later) folder on Instapaper.
*/
public static final String DEFAULT_FOLDER = "Read Later";
private static final String BASE_URL = "https://www.instapaper.com";
private static final String DEFAULT_URL = BASE_URL + "/u";
private static final String DEFAULT_ID = "0";
/**
* Create a {@link Session} for a login to the Instapaper service. Password is
* not stored by the application, only session cookies.
*
* @param email
* The user's email address.
* @param pass
* The user's password.
* @return A session that can be used to instantiate an {@link Instapaper}
* object or <code>null</code> if login failed.
* @throws IOException
*/
public static Session login( final String email, final String pass )
  throws IOException
{
  // POST the credentials to the login endpoint; only session cookies are
  // retained — the password itself is never stored.
  final Connection.Response resp = Conn.connect(BASE_URL + "/user/login")
      .data("username", email, "password", pass)
      .method(Connection.Method.POST)
      .execute();
  final Map<String, String> cookies = resp.cookies();
  if ( cookies.isEmpty() )
    return null; // No cookies means the login was rejected.
  return new InstapaperSession(ImmutableMap.copyOf(cookies));
}
private final InstapaperSession session;
private final TitleMunger titleMunger;
private ImmutableMap<String, Folder> _folders;
/**
* Create a logged-in connection to Instapaper based on a session previously
* produced by {@link #login(String, String)}.
*
* @param session
* The session object to use.
* @param title_munger
* A title munger to use on article titles.
*/
public Instapaper( final Session session, final TitleMunger title_munger )
{
  // Guard: only sessions produced by login() are acceptable.
  if ( !(session instanceof InstapaperSession) )
    throw new IllegalArgumentException(
        "Invalid session object for Instapaper.");
  this.session = (InstapaperSession) session;
  // Fall back to a no-op munger when none was supplied.
  if ( title_munger == null )
    this.titleMunger = TitleMunger.empty();
  else
    this.titleMunger = title_munger;
}
/**
 * Get a folder from Instapaper by name (case-insensitive).
 *
 * @param name
 *          The name of the folder.
 * @return The folder, or <code>null</code> if it does not exist.
 * @throws IOException
 */
public InstapaperFolder getFolder( final String name ) throws IOException
{
    final ImmutableMap<String, Folder> folders = _getFolders();
    return folders.get(name);
}
/**
 * A list of all the folders on Instapaper, in case-insensitive name order.
 *
 * @return An immutable list of all the folders on Instapaper.
 * @throws IOException
 */
public ImmutableList<InstapaperFolder> getFolders() throws IOException
{
    final ImmutableList<InstapaperFolder> all =
        ImmutableList.<InstapaperFolder> copyOf(_getFolders().values());
    return all;
}
/**
 * Get the default folder (equivalent to
 * <code>getFolder(DEFAULT_FOLDER)</code>.)
 *
 * @return The default (Read Later) folder.
 * @throws IOException
 */
public InstapaperFolder getReadLaterFolder() throws IOException
{
    // The default folder is always seeded into the folder map.
    return getFolder(DEFAULT_FOLDER);
}
/**
 * Fetch (and cache) the map of folders, keyed case-insensitively by folder
 * name. The default (Read Later) folder is always present; additional
 * folders are scraped from the "folder_link" elements on the user's
 * landing page.
 * <p>
 * NOTE(review): the cache is built lazily and the {@code _folders} write
 * is not synchronized — concurrent callers may each fetch the page once.
 * Confirm instances are used from a single thread.
 *
 * @return The folder map; never <code>null</code>.
 * @throws IOException
 */
private ImmutableMap<String, Folder> _getFolders() throws IOException
{
    ImmutableMap<String, Folder> folders = _folders;
    if ( folders == null )
    {
        final Logger log = LoggerFactory.getLogger(getClass());
        log.trace("Retrieving folder list from Instapaper.");
        final Document doc = connect(DEFAULT_URL).get();
        final TreeMapIgnoreCase<Folder> map = Colls.newTreeMapIgnoreCase();
        // Seed the map with the always-present default folder, reusing the
        // landing page just fetched as its article list.
        {
            final Folder folder = new Folder(DEFAULT_FOLDER, DEFAULT_URL,
                DEFAULT_ID);
            folder._saveArticles(doc);
            log.trace("Folder: {} => {}", DEFAULT_FOLDER, DEFAULT_URL);
            map.put(folder.getName(), folder);
        }
        // User-created folders appear as elements with class "folder_link".
        final Elements folder_columns = doc.getElementsByClass("folder_link");
        if ( (folder_columns != null) && (!folder_columns.isEmpty()) )
            _getFolders(folder_columns, map);
        // Publish the completed, sorted snapshot to the cache field.
        _folders = folders = ImmutableSortedMap.copyOfSorted(map);
    }
    return folders;
}
/**
 * Recursively scan the given elements for folder links ({@code <a>} tags)
 * and add a {@link Folder} for each well-formed link to the map.
 *
 * @param els
 *          The elements to scan; may be <code>null</code>.
 * @param map
 *          Receives the discovered folders, keyed (case-insensitively) by
 *          folder name.
 */
private void _getFolders( final Elements els,
    final TreeMapIgnoreCase<Folder> map )
{
    if ( els == null )
        return;
    // Hoisted out of the loop: the original compiled this pattern once per
    // link element.
    final Pattern id_pattern = Pattern.compile("\\/u\\/folder\\/(\\d+)\\/");
    for ( final Element el : els )
        if ( !"a".equalsIgnoreCase(el.tagName()) )
        {
            // Not a link: descend into the children looking for links.
            _getFolders(el.children(), map);
        }
        else
        {
            final String name = Str.trimToNull(el.text());
            final String url = el.attr("href");
            // The folder id is the numeric path segment of the folder URL.
            final String id;
            {
                final Matcher mat = id_pattern.matcher(url);
                if ( mat.find() )
                    id = mat.group(1);
                else
                    id = null;
            }
            // Only record folders for which every component was parsed.
            if ( Str.isNotEmpty(name) && Str.isNotEmpty(url) && Str.isNotEmpty(id) )
            {
                LoggerFactory.getLogger(getClass()).trace("Folder: {} => {}", name,
                    url);
                map.put(name, new Folder(name, url, id));
            }
        }
}
/**
 * Open a connection to the given URL carrying every cookie of the current
 * session, so the request is authenticated.
 *
 * @param url
 *          The URL to connect to.
 * @return The prepared, not-yet-executed connection.
 */
private Connection connect( final String url )
{
    final Connection conn = Conn.connect(url);
    final Map<String, String> cookies = session.getCookies();
    for ( final Map.Entry<String, String> cookie : cookies.entrySet() )
        conn.cookie(cookie.getKey(), cookie.getValue());
    return conn;
}
}
| |
/*
* Copyright (c) 2015-present, Horcrux.
* All rights reserved.
*
* This source code is licensed under the MIT-style license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.horcrux.svg;
import android.annotation.SuppressLint;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.Region;
import android.view.View;
import android.view.ViewParent;
import com.facebook.react.bridge.Dynamic;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.uimanager.annotations.ReactProp;
import java.util.ArrayList;
import javax.annotation.Nullable;
import static com.horcrux.svg.TextProperties.AlignmentBaseline;
import static com.horcrux.svg.TextProperties.TextLengthAdjust;
@SuppressLint("ViewConstructor")
class TextView extends GroupView {
  // SVG text layout attributes (inline-size / textLength).
  SVGLength mInlineSize = null;
  SVGLength mTextLength = null;
  private String mBaselineShift = null;
  TextLengthAdjust mLengthAdjust = TextLengthAdjust.spacing;
  private AlignmentBaseline mAlignmentBaseline;
  // Per-glyph position / offset / rotation lists from the x/y/dx/dy/rotate props.
  @Nullable private ArrayList<SVGLength> mPositionX;
  @Nullable private ArrayList<SVGLength> mPositionY;
  @Nullable private ArrayList<SVGLength> mRotate;
  @Nullable private ArrayList<SVGLength> mDeltaX;
  @Nullable private ArrayList<SVGLength> mDeltaY;
  // Cached total advance of this text subtree; NaN means "not computed yet".
  double cachedAdvance = Double.NaN;

  public TextView(ReactContext reactContext) {
    super(reactContext);
  }

  /** Invalidate this node and drop cached child layout of the outermost text container. */
  @Override
  public void invalidate() {
    if (mPath == null) {
      return;
    }
    super.invalidate();
    getTextContainer().clearChildCache();
  }

  // Fix: annotated @Override (it redefines the superclass hook and calls super).
  @Override
  void clearCache() {
    cachedAdvance = Double.NaN;
    super.clearCache();
  }

  @ReactProp(name = "inlineSize")
  public void setInlineSize(Dynamic inlineSize) {
    mInlineSize = SVGLength.from(inlineSize);
    invalidate();
  }

  @ReactProp(name = "textLength")
  public void setTextLength(Dynamic length) {
    mTextLength = SVGLength.from(length);
    invalidate();
  }

  @ReactProp(name = "lengthAdjust")
  public void setLengthAdjust(@Nullable String adjustment) {
    // Fix: the parameter is @Nullable but valueOf(null) throws NPE — fall back
    // to the default ("spacing") instead of crashing when the prop is cleared.
    mLengthAdjust =
        adjustment == null ? TextLengthAdjust.spacing : TextLengthAdjust.valueOf(adjustment);
    invalidate();
  }

  @ReactProp(name = "alignmentBaseline")
  public void setMethod(@Nullable String alignment) {
    mAlignmentBaseline = AlignmentBaseline.getEnum(alignment);
    invalidate();
  }

  @ReactProp(name = "baselineShift")
  public void setBaselineShift(Dynamic baselineShift) {
    mBaselineShift = SVGLength.toString(baselineShift);
    invalidate();
  }

  /**
   * Parse a CSS-like "vertical-align" value of the form "[shift ]alignment",
   * e.g. "super middle" or just "middle".
   */
  @ReactProp(name = "verticalAlign")
  public void setVerticalAlign(@Nullable String verticalAlign) {
    if (verticalAlign != null) {
      verticalAlign = verticalAlign.trim();
      int i = verticalAlign.lastIndexOf(' ');
      try {
        // Fix: was substring(i), which (a) crashed with
        // StringIndexOutOfBoundsException for single-token values (i == -1,
        // not caught below) and (b) passed the leading space to getEnum.
        // i + 1 yields the whole string when no space exists, else the token
        // after the last space.
        mAlignmentBaseline = AlignmentBaseline.getEnum(verticalAlign.substring(i + 1));
      } catch (IllegalArgumentException e) {
        mAlignmentBaseline = AlignmentBaseline.baseline;
      }
      try {
        // Everything before the last space is the baseline shift; substring
        // throws when i == -1, leaving the shift null.
        mBaselineShift = verticalAlign.substring(0, i);
      } catch (IndexOutOfBoundsException e) {
        mBaselineShift = null;
      }
    } else {
      mAlignmentBaseline = AlignmentBaseline.baseline;
      mBaselineShift = null;
    }
    invalidate();
  }

  @ReactProp(name = "rotate")
  public void setRotate(Dynamic rotate) {
    mRotate = SVGLength.arrayFrom(rotate);
    invalidate();
  }

  @ReactProp(name = "dx")
  public void setDeltaX(Dynamic deltaX) {
    mDeltaX = SVGLength.arrayFrom(deltaX);
    invalidate();
  }

  @ReactProp(name = "dy")
  public void setDeltaY(Dynamic deltaY) {
    mDeltaY = SVGLength.arrayFrom(deltaY);
    invalidate();
  }

  @ReactProp(name = "x")
  public void setPositionX(Dynamic positionX) {
    mPositionX = SVGLength.arrayFrom(positionX);
    invalidate();
  }

  @ReactProp(name = "y")
  public void setPositionY(Dynamic positionY) {
    mPositionY = SVGLength.arrayFrom(positionY);
    invalidate();
  }

  @Override
  void draw(Canvas canvas, Paint paint, float opacity) {
    setupGlyphContext(canvas);
    clip(canvas, paint);
    getGroupPath(canvas, paint);
    pushGlyphContext();
    drawGroup(canvas, paint, opacity);
    popGlyphContext();
  }

  @Override
  Path getPath(Canvas canvas, Paint paint) {
    if (mPath != null) {
      return mPath;
    }
    setupGlyphContext(canvas);
    return getGroupPath(canvas, paint);
  }

  @Override
  Path getPath(Canvas canvas, Paint paint, Region.Op op) {
    // Text clipping ignores the Region.Op; delegate to the plain path.
    return getPath(canvas, paint);
  }

  /**
   * Resolve the effective alignment baseline, walking up through enclosing
   * TextViews and defaulting to "baseline". The result is memoized in
   * mAlignmentBaseline.
   */
  AlignmentBaseline getAlignmentBaseline() {
    if (mAlignmentBaseline == null) {
      ViewParent parent = this.getParent();
      while (parent != null) {
        if (parent instanceof TextView) {
          TextView node = (TextView) parent;
          final AlignmentBaseline baseline = node.mAlignmentBaseline;
          if (baseline != null) {
            mAlignmentBaseline = baseline;
            return baseline;
          }
        }
        parent = parent.getParent();
      }
    }
    if (mAlignmentBaseline == null) {
      mAlignmentBaseline = AlignmentBaseline.baseline;
    }
    return mAlignmentBaseline;
  }

  /**
   * Resolve the effective baseline shift, inherited from the nearest enclosing
   * TextView that sets one; may return null. Memoized in mBaselineShift.
   */
  String getBaselineShift() {
    if (mBaselineShift == null) {
      ViewParent parent = this.getParent();
      while (parent != null) {
        if (parent instanceof TextView) {
          TextView node = (TextView) parent;
          final String baselineShift = node.mBaselineShift;
          if (baselineShift != null) {
            mBaselineShift = baselineShift;
            return baselineShift;
          }
        }
        parent = parent.getParent();
      }
    }
    return mBaselineShift;
  }

  /** Build (or return the cached) path for this group under the current glyph context. */
  Path getGroupPath(Canvas canvas, Paint paint) {
    if (mPath != null) {
      return mPath;
    }
    pushGlyphContext();
    mPath = super.getPath(canvas, paint);
    popGlyphContext();
    return mPath;
  }

  @Override
  void pushGlyphContext() {
    // Only a plain <Text> (not <TextPath>/<TSpan>) opens a fresh text node context.
    boolean isTextNode = !(this instanceof TextPathView) && !(this instanceof TSpanView);
    getTextRootGlyphContext().pushContext(isTextNode, this, mFont, mPositionX, mPositionY, mDeltaX, mDeltaY, mRotate);
  }

  /**
   * Walk up to the outermost TextView that anchors this text chunk: stop at
   * the first ancestor boundary where the font context demands a new anchor
   * (start anchor or explicit x positions).
   */
  TextView getTextAnchorRoot() {
    GlyphContext gc = getTextRootGlyphContext();
    ArrayList<FontData> font = gc.mFontContext;
    TextView node = this;
    ViewParent parent = this.getParent();
    for (int i = font.size() - 1; i >= 0; i--) {
      if (!(parent instanceof TextView) || font.get(i).textAnchor == TextProperties.TextAnchor.start || node.mPositionX != null) {
        return node;
      }
      node = (TextView) parent;
      parent = node.getParent();
    }
    return node;
  }

  /** Sum (and cache) the advance of every TextView descendant, using the given paint. */
  double getSubtreeTextChunksTotalAdvance(Paint paint) {
    if (!Double.isNaN(cachedAdvance)) {
      return cachedAdvance;
    }
    double advance = 0;
    for (int i = 0; i < getChildCount(); i++) {
      View child = getChildAt(i);
      if (child instanceof TextView) {
        TextView text = (TextView) child;
        advance += text.getSubtreeTextChunksTotalAdvance(paint);
      }
    }
    cachedAdvance = advance;
    return advance;
  }

  /** @return the outermost enclosing TextView (the text container), possibly this node. */
  TextView getTextContainer() {
    TextView node = this;
    ViewParent parent = this.getParent();
    while (parent instanceof TextView) {
      node = (TextView) parent;
      parent = node.getParent();
    }
    return node;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.translator;
import java.util.Map;
import org.apache.asterix.common.metadata.DataverseName;
import org.apache.asterix.external.feed.management.FeedConnectionRequest;
import org.apache.asterix.lang.common.base.Expression;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.expression.VariableExpr;
import org.apache.asterix.lang.common.statement.Query;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.api.exceptions.SourceLocation;
/**
 * An AQL statement instance is translated into an instance of a compiled
 * statement type that has additional fields for use by the AqlTranslator.
 */
public class CompiledStatements {

    /** A translated statement: exposes its kind and source location. */
    public interface ICompiledStatement {
        Statement.Kind getKind();

        SourceLocation getSourceLocation();
    }

    /** Base class carrying the (mutable) source location of a compiled statement. */
    public static abstract class AbstractCompiledStatement implements ICompiledStatement {
        private SourceLocation sourceLoc;

        public void setSourceLocation(SourceLocation sourceLoc) {
            this.sourceLoc = sourceLoc;
        }

        @Override
        public SourceLocation getSourceLocation() {
            return sourceLoc;
        }
    }

    public static class CompiledDatasetDropStatement extends AbstractCompiledStatement {
        private final String dataverseName;
        private final String datasetName;

        public CompiledDatasetDropStatement(String dataverseName, String datasetName) {
            this.dataverseName = dataverseName;
            this.datasetName = datasetName;
        }

        public String getDataverseName() {
            return dataverseName;
        }

        public String getDatasetName() {
            return datasetName;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.DATASET_DROP;
        }
    }

    // added by yasser
    public static class CompiledCreateDataverseStatement extends AbstractCompiledStatement {
        private final String dataverseName;
        private final String format;

        public CompiledCreateDataverseStatement(String dataverseName, String format) {
            this.dataverseName = dataverseName;
            this.format = format;
        }

        public String getDataverseName() {
            return dataverseName;
        }

        public String getFormat() {
            return format;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.CREATE_DATAVERSE;
        }
    }

    public static class CompiledNodeGroupDropStatement extends AbstractCompiledStatement {
        private final String nodeGroupName;

        public CompiledNodeGroupDropStatement(String nodeGroupName) {
            this.nodeGroupName = nodeGroupName;
        }

        public String getNodeGroupName() {
            return nodeGroupName;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.NODEGROUP_DROP;
        }
    }

    public static class CompiledIndexDropStatement extends AbstractCompiledStatement {
        private final String dataverseName;
        private final String datasetName;
        private final String indexName;

        public CompiledIndexDropStatement(String dataverseName, String datasetName, String indexName) {
            this.dataverseName = dataverseName;
            this.datasetName = datasetName;
            this.indexName = indexName;
        }

        public String getDataverseName() {
            return dataverseName;
        }

        public String getDatasetName() {
            return datasetName;
        }

        public String getIndexName() {
            return indexName;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.INDEX_DROP;
        }
    }

    public static class CompiledDataverseDropStatement extends AbstractCompiledStatement {
        private final String dataverseName;
        private final boolean ifExists;

        public CompiledDataverseDropStatement(String dataverseName, boolean ifExists) {
            this.dataverseName = dataverseName;
            this.ifExists = ifExists;
        }

        public String getDataverseName() {
            return dataverseName;
        }

        public boolean getIfExists() {
            return ifExists;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.DATAVERSE_DROP;
        }
    }

    public static class CompiledTypeDropStatement extends AbstractCompiledStatement {
        private final String typeName;

        // NOTE(review): the parameter name "nodeGroupName" looks like a
        // copy/paste from CompiledNodeGroupDropStatement; it actually carries
        // the type name. Renaming the parameter is safe for callers.
        public CompiledTypeDropStatement(String nodeGroupName) {
            this.typeName = nodeGroupName;
        }

        public String getTypeName() {
            return typeName;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.TYPE_DROP;
        }
    }

    /** A compiled DML statement: additionally targets a dataverse and dataset. */
    public interface ICompiledDmlStatement extends ICompiledStatement {
        DataverseName getDataverseName();

        String getDatasetName();
    }

    public static class CompiledCreateIndexStatement extends AbstractCompiledStatement
            implements ICompiledDmlStatement {
        private final Dataset dataset;
        private final Index index;

        public CompiledCreateIndexStatement(Dataset dataset, Index index) {
            this.dataset = dataset;
            this.index = index;
        }

        @Override
        public String getDatasetName() {
            return index.getDatasetName();
        }

        @Override
        public DataverseName getDataverseName() {
            return index.getDataverseName();
        }

        public Index getIndex() {
            return index;
        }

        public Dataset getDataset() {
            return dataset;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.CREATE_INDEX;
        }
    }

    public static class CompiledLoadFromFileStatement extends AbstractCompiledStatement
            implements ICompiledDmlStatement {
        private final DataverseName dataverseName;
        private final String datasetName;
        private final boolean alreadySorted;
        private final String adapter;
        private final Map<String, String> properties;

        public CompiledLoadFromFileStatement(DataverseName dataverseName, String datasetName, String adapter,
                Map<String, String> properties, boolean alreadySorted) {
            this.dataverseName = dataverseName;
            this.datasetName = datasetName;
            this.alreadySorted = alreadySorted;
            this.adapter = adapter;
            this.properties = properties;
        }

        @Override
        public DataverseName getDataverseName() {
            return dataverseName;
        }

        @Override
        public String getDatasetName() {
            return datasetName;
        }

        public boolean alreadySorted() {
            return alreadySorted;
        }

        public String getAdapter() {
            return adapter;
        }

        public Map<String, String> getProperties() {
            return properties;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.LOAD;
        }
    }

    public static class CompiledInsertStatement extends AbstractCompiledStatement implements ICompiledDmlStatement {
        private final DataverseName dataverseName;
        private final String datasetName;
        private final Query query;
        private final int varCounter;
        private final VariableExpr var;
        private final Expression returnExpression;

        public CompiledInsertStatement(DataverseName dataverseName, String datasetName, Query query, int varCounter,
                VariableExpr var, Expression returnExpression) {
            this.dataverseName = dataverseName;
            this.datasetName = datasetName;
            this.query = query;
            this.varCounter = varCounter;
            this.var = var;
            this.returnExpression = returnExpression;
        }

        @Override
        public DataverseName getDataverseName() {
            return dataverseName;
        }

        @Override
        public String getDatasetName() {
            return datasetName;
        }

        public int getVarCounter() {
            return varCounter;
        }

        public Query getQuery() {
            return query;
        }

        public VariableExpr getVar() {
            return var;
        }

        public Expression getReturnExpression() {
            return returnExpression;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.INSERT;
        }
    }

    public static class CompiledUpsertStatement extends CompiledInsertStatement {

        public CompiledUpsertStatement(DataverseName dataverseName, String datasetName, Query query, int varCounter,
                VariableExpr var, Expression returnExpression) {
            super(dataverseName, datasetName, query, varCounter, var, returnExpression);
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.UPSERT;
        }
    }

    public static class CompiledSubscribeFeedStatement extends AbstractCompiledStatement
            implements ICompiledDmlStatement {
        // Made final: the request is never reassigned after construction.
        private final FeedConnectionRequest request;
        private final int varCounter;

        public CompiledSubscribeFeedStatement(FeedConnectionRequest request, int varCounter) {
            this.request = request;
            this.varCounter = varCounter;
        }

        @Override
        public DataverseName getDataverseName() {
            return request.getReceivingFeedId().getDataverseName();
        }

        public String getFeedName() {
            return request.getReceivingFeedId().getEntityName();
        }

        @Override
        public String getDatasetName() {
            return request.getTargetDataset();
        }

        public int getVarCounter() {
            return varCounter;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.SUBSCRIBE_FEED;
        }
    }

    public static class CompiledDeleteStatement extends AbstractCompiledStatement implements ICompiledDmlStatement {
        private final DataverseName dataverseName;
        private final String datasetName;
        private final Expression condition;
        private final int varCounter;
        private final Query query;

        // NOTE(review): the "var" parameter is accepted but unused; kept for
        // constructor-signature compatibility with existing callers.
        public CompiledDeleteStatement(VariableExpr var, DataverseName dataverseName, String datasetName,
                Expression condition, int varCounter, Query query) {
            this.dataverseName = dataverseName;
            this.datasetName = datasetName;
            this.condition = condition;
            this.varCounter = varCounter;
            this.query = query;
        }

        @Override
        public String getDatasetName() {
            return datasetName;
        }

        @Override
        public DataverseName getDataverseName() {
            return dataverseName;
        }

        public int getVarCounter() {
            return varCounter;
        }

        public Expression getCondition() {
            return condition;
        }

        public Query getQuery() throws AlgebricksException {
            return query;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.DELETE;
        }
    }

    public static class CompiledCompactStatement extends AbstractCompiledStatement {
        private final DataverseName dataverseName;
        private final String datasetName;

        public CompiledCompactStatement(DataverseName dataverseName, String datasetName) {
            this.dataverseName = dataverseName;
            this.datasetName = datasetName;
        }

        public DataverseName getDataverseName() {
            return dataverseName;
        }

        public String getDatasetName() {
            return datasetName;
        }

        @Override
        public Statement.Kind getKind() {
            return Statement.Kind.COMPACT;
        }
    }

    public static class CompiledIndexCompactStatement extends CompiledCompactStatement {
        // Made final: both fields are assigned only in the constructor.
        private final Dataset dataset;
        private final Index index;

        public CompiledIndexCompactStatement(Dataset dataset, Index index) {
            super(dataset.getDataverseName(), dataset.getDatasetName());
            this.dataset = dataset;
            this.index = index;
        }

        public Dataset getDataset() {
            return dataset;
        }

        public Index getIndex() {
            return index;
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mturk.model;
import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.protocol.ProtocolMarshaller;
import com.amazonaws.protocol.StructuredPojo;
/**
 * <p>
 * Name of the parameter from the Review policy.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/PolicyParameter" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class PolicyParameter implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * Name of the parameter from the list of Review Polices.
     * </p>
     */
    private String key;
    /**
     * <p>
     * The list of values of the Parameter
     * </p>
     */
    private java.util.List<String> values;
    /**
     * <p>
     * List of ParameterMapEntry objects.
     * </p>
     */
    private java.util.List<ParameterMapEntry> mapEntries;

    /**
     * <p>
     * Name of the parameter from the list of Review Polices.
     * </p>
     *
     * @param key
     *        Name of the parameter from the list of Review Polices.
     */
    public void setKey(String key) {
        this.key = key;
    }

    /**
     * <p>
     * Name of the parameter from the list of Review Polices.
     * </p>
     *
     * @return Name of the parameter from the list of Review Polices.
     */
    public String getKey() {
        return this.key;
    }

    /**
     * <p>
     * Name of the parameter from the list of Review Polices.
     * </p>
     *
     * @param key
     *        Name of the parameter from the list of Review Polices.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public PolicyParameter withKey(String key) {
        setKey(key);
        return this;
    }

    /**
     * <p>
     * The list of values of the Parameter
     * </p>
     *
     * @return The list of values of the Parameter
     */
    public java.util.List<String> getValues() {
        return values;
    }

    /**
     * <p>
     * The list of values of the Parameter
     * </p>
     *
     * @param values
     *        The list of values of the Parameter
     */
    public void setValues(java.util.Collection<String> values) {
        if (values == null) {
            this.values = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection has no effect.
        this.values = new java.util.ArrayList<String>(values);
    }

    /**
     * <p>
     * The list of values of the Parameter
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setValues(java.util.Collection)} or {@link #withValues(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param values
     *        The list of values of the Parameter
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public PolicyParameter withValues(String... values) {
        if (this.values == null) {
            setValues(new java.util.ArrayList<String>(values.length));
        }
        for (String ele : values) {
            this.values.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The list of values of the Parameter
     * </p>
     *
     * @param values
     *        The list of values of the Parameter
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public PolicyParameter withValues(java.util.Collection<String> values) {
        setValues(values);
        return this;
    }

    /**
     * <p>
     * List of ParameterMapEntry objects.
     * </p>
     *
     * @return List of ParameterMapEntry objects.
     */
    public java.util.List<ParameterMapEntry> getMapEntries() {
        return mapEntries;
    }

    /**
     * <p>
     * List of ParameterMapEntry objects.
     * </p>
     *
     * @param mapEntries
     *        List of ParameterMapEntry objects.
     */
    public void setMapEntries(java.util.Collection<ParameterMapEntry> mapEntries) {
        if (mapEntries == null) {
            this.mapEntries = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection has no effect.
        this.mapEntries = new java.util.ArrayList<ParameterMapEntry>(mapEntries);
    }

    /**
     * <p>
     * List of ParameterMapEntry objects.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setMapEntries(java.util.Collection)} or {@link #withMapEntries(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param mapEntries
     *        List of ParameterMapEntry objects.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public PolicyParameter withMapEntries(ParameterMapEntry... mapEntries) {
        if (this.mapEntries == null) {
            setMapEntries(new java.util.ArrayList<ParameterMapEntry>(mapEntries.length));
        }
        for (ParameterMapEntry ele : mapEntries) {
            this.mapEntries.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * List of ParameterMapEntry objects.
     * </p>
     *
     * @param mapEntries
     *        List of ParameterMapEntry objects.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public PolicyParameter withMapEntries(java.util.Collection<ParameterMapEntry> mapEntries) {
        setMapEntries(mapEntries);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getKey() != null)
            sb.append("Key: ").append(getKey()).append(",");
        if (getValues() != null)
            sb.append("Values: ").append(getValues()).append(",");
        if (getMapEntries() != null)
            sb.append("MapEntries: ").append(getMapEntries());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof PolicyParameter))
            return false;
        PolicyParameter other = (PolicyParameter) obj;
        // Objects.equals replaces the original null-xor/equals chains with
        // identical semantics (null-safe field-by-field comparison).
        return Objects.equals(getKey(), other.getKey())
                && Objects.equals(getValues(), other.getValues())
                && Objects.equals(getMapEntries(), other.getMapEntries());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds with the same prime-31 algorithm (null -> 0) as the
        // original hand-rolled loop, so hash values are unchanged.
        return Objects.hash(getKey(), getValues(), getMapEntries());
    }

    @Override
    public PolicyParameter clone() {
        try {
            return (PolicyParameter) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.mturk.model.transform.PolicyParameterMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.compile;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Map;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.util.DrillStringUtils;
import org.apache.drill.exec.compile.ClassTransformer.ClassNames;
import org.apache.drill.exec.exception.ClassTransformationException;
import org.apache.drill.exec.expr.CodeGenerator;
import org.apache.drill.exec.server.options.OptionManager;
import org.codehaus.commons.compiler.CompileException;
import org.objectweb.asm.tree.ClassNode;
import com.google.common.collect.Maps;
/**
* Implements the "plain Java" method of code generation and
* compilation. Given a {@link CodeGenerator}, obtains the generated
* source code, compiles it with the selected compiler, loads the
* byte-codes into a class loader and provides the resulting
* class. Compared with the {@link ClassTransformer} mechanism,
* this one requires the code generator to have generated a complete
* Java class that is capable of direct compilation and loading.
* This means the generated class must be a subclass of the template
* so that the JVM can use normal Java inheritance to associate the
* template and generated methods.
* <p>
* Here is how to use the plain Java technique to debug
* generated code:
* <ul>
* <li>Set the config option <tt>drill.exec.compile.code_dir</tt>
* to the location where you want to save the generated source
* code.</li>
* <li>Where you generate code (using a {@link CodeGenerator}),
* set the "plain Java" options:<pre>
* CodeGenerator<Foo> cg = ...
* cg.plainJavaCapable(true); // Class supports plain Java
* cg.preferPlainJava(true); // Actually generate plain Java
* cg.saveCodeForDebugging(true); // Save code for debugging
* ...</pre>
* Note that <tt>saveCodeForDebugging</tt> automatically sets the PJ
* option if the generator is capable. Call <tt>preferPlainJava</tt>
* only if you want to try PJ for this particular generated class
* without saving the generated code.</li>
* <li>In your favorite IDE, add to the code lookup path the
* code directory saved earlier. In Eclipse, for example, you do
* this in the debug configuration you will use to debug Drill.</li>
* <li>Set a breakpoint in template used for the generated code.</li>
* <li>Run Drill. The IDE will stop at your breakpoint.</li>
* <li>Step into the generated code. Examine class field and
* local variables. Have fun!</li>
* </ul>
* <p>
* Most generated classes have been upgraded to support Plain Java
* compilation. Once this work is complete, the calls to
 * <tt>plainJavaCapable</tt> can be removed as all generated classes
* will be capable.
* <p>
* The setting to prefer plain Java is ignored for any remaining generated
* classes not marked as plain Java capable.
*/
public class ClassBuilder {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ClassBuilder.class);

/** Config key naming the directory where generated source is saved for debugging. */
public static final String CODE_DIR_OPTION = CodeCompiler.COMPILE_BASE + ".code_dir";
private final DrillConfig config;
private final OptionManager options;
// Destination directory for saved generated source; see saveCode().
private final File codeDir;
/**
 * Create a builder that compiles generated plain Java source using the
 * compiler selected by the given config and session options.
 *
 * @param config Drill configuration (supplies the code-save directory)
 * @param optionManager session/system options controlling compiler choice
 */
public ClassBuilder(DrillConfig config, OptionManager optionManager) {
  this.config = config;
  options = optionManager;

  // Code can be saved per-class to enable debugging.
  // Just mark the code generator as to be persisted,
  // point your debugger to the directory set below, and you
  // can step into the code for debugging. Code is not saved
  // by default because doing so is expensive and unnecessary.
  codeDir = new File(config.getString(CODE_DIR_OPTION));
}
/**
 * Compile the plain Java source produced by the given code generator and
 * return the resulting class, wrapping any compile/load failure in Drill's
 * standard exception type.
 *
 * @param cg a plain Java capable code generator that has generated
 * plain Java code
 * @return the class that the code generator defines
 * @throws ClassTransformationException when compilation or class loading fails
 */
public Class<?> getImplementationClass(CodeGenerator<?> cg) throws ClassTransformationException {
  try {
    return compileClass(cg);
  } catch (CompileException | ClassNotFoundException | IOException ex) {
    // Normalize every failure mode into the single exception callers expect.
    throw new ClassTransformationException(ex);
  }
}
/**
 * Performs the actual work of compiling the code and loading the class.
 *
 * @param cg the code generator that has built the class(es) to be generated.
 * @return the class, after code generation and (if needed) compilation.
 * @throws IOException if an error occurs when optionally writing code to disk.
 * @throws CompileException if the generated code has compile issues.
 * @throws ClassNotFoundException if the generated code references unknown classes.
 * @throws ClassTransformationException generic "something is wrong" error from
 * Drill class compilation code.
 */
@SuppressWarnings("resource")
private Class<?> compileClass(CodeGenerator<?> cg) throws IOException, CompileException, ClassNotFoundException, ClassTransformationException {
  // Start the clock for the compile-time debug log below.
  final long t1 = System.nanoTime();

  // Get the plain Java code.
  String code = cg.getGeneratedCode();

  // Get the class names (dotted, file path, etc.)
  String className = cg.getMaterializedClassName();
  ClassTransformer.ClassNames name = new ClassTransformer.ClassNames(className);

  // A key advantage of this method is that the code can be
  // saved and debugged, if needed.
  if (cg.isCodeToBeSaved()) {
    saveCode(code, name);
  }

  // Compile the code and load it into a class loader.
  // The selector picks the configured compiler (e.g. Janino vs. JDK) and may
  // return multiple entries (outer class plus inner classes).
  CachedClassLoader classLoader = new CachedClassLoader();
  ClassCompilerSelector compilerSelector = new ClassCompilerSelector(classLoader, config, options);
  Map<String,byte[]> results = compilerSelector.compile(name, code);
  classLoader.addClasses(results);

  // Sum all generated class files' sizes for the log message.
  long totalBytecodeSize = 0;
  for (byte[] clazz : results.values()) {
    totalBytecodeSize += clazz.length;
  }
  // The "+ 500_000" rounds the elapsed nanoseconds to the nearest millisecond.
  logger.debug("Compiled {}: bytecode size = {}, time = {} ms.",
      cg.getClassName(),
      DrillStringUtils.readable(totalBytecodeSize),
      (System.nanoTime() - t1 + 500_000) / 1_000_000);

  // Get the class from the class loader.
  try {
    return classLoader.findClass(className);
  } catch (ClassNotFoundException e) {
    // This should never occur: the class was added to this loader just above.
    throw new IllegalStateException("Code load failed", e);
  }
}
/**
* Save code to a predefined location for debugging. To use the code
* for debugging, make sure the save location is on your IDE's source
* code search path. Code is saved in usual Java format with each
* package as a directory. The provided code directory becomes a
* source directory, as in Maven's "src/main/java".
*
* @param code the source code
* @param name the class name
*/
private void saveCode(String code, ClassNames name) {
String pathName = name.slash + ".java";
File codeFile = new File(codeDir, pathName);
codeFile.getParentFile().mkdirs();
try (final FileWriter writer = new FileWriter(codeFile)) {
writer.write(code);
} catch (IOException e) {
System.err.println("Could not save: " + codeFile.getAbsolutePath());
}
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.pkgcache;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Sets;
import com.google.common.eventbus.EventBus;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.ResolvedTargets;
import com.google.devtools.build.lib.cmdline.TargetParsingException;
import com.google.devtools.build.lib.events.DelegatingEventHandler;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventHandler;
import com.google.devtools.build.lib.packages.NoSuchPackageException;
import com.google.devtools.build.lib.packages.Target;
import com.google.devtools.build.lib.packages.TestTargetUtils;
import com.google.devtools.build.lib.util.Preconditions;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
import javax.annotation.Nullable;
/**
* Implements the loading phase; responsible for:
* <ul>
* <li>target pattern evaluation
* <li>test suite expansion
* <li>loading the labels needed to construct the build configuration
* <li>loading the labels needed for the analysis with the build configuration
* <li>loading the transitive closure of the targets and the configuration labels
* </ul>
*
* <p>In order to ensure correctness of incremental loading and of full cache hits, this class is
* very restrictive about access to its internal state and to its collaborators. In particular, none
* of the collaborators of this class may change in incompatible ways, such as changing the relative
* working directory for the target pattern parser, without notifying this class.
*
* <p>For full caching, this class tracks the exact values of all inputs to the loading phase. To
* maximize caching, it is vital that these change as rarely as possible.
*
* <p>The Skyframe-based re-implementation of this class is in TargetPatternPhaseFunction.
*/
public final class LegacyLoadingPhaseRunner extends LoadingPhaseRunner {

  /**
   * Event handler wrapper that, in addition to delegating all events, posts a
   * {@link ParsingFailedEvent} to the event bus (when one is present) for every
   * target pattern that fails to parse.
   */
  private static final class ParseFailureListenerImpl extends DelegatingEventHandler
      implements ParseFailureListener {
    private final EventBus eventBus;

    private ParseFailureListenerImpl(EventHandler delegate, EventBus eventBus) {
      super(delegate);
      this.eventBus = eventBus;
    }

    @Override
    public void parsingError(String targetPattern, String message) {
      // The bus may be null here; in that case the failure is only reported
      // through the delegate handler.
      if (eventBus != null) {
        eventBus.post(new ParsingFailedEvent(targetPattern, message));
      }
    }
  }

  private static final Logger LOG = Logger.getLogger(LoadingPhaseRunner.class.getName());

  private final PackageManager packageManager;
  private final TargetPatternEvaluator targetPatternEvaluator;
  // Rule class names used for test filtering (passed to TestFilter.forOptions).
  private final Set<String> ruleNames;

  public LegacyLoadingPhaseRunner(PackageManager packageManager,
      Set<String> ruleNames) {
    this.packageManager = packageManager;
    this.targetPatternEvaluator = packageManager.newTargetPatternEvaluator();
    this.ruleNames = ruleNames;
  }

  /**
   * Performs target pattern evaluation, test suite expansion (if requested), and loads the
   * transitive closure of the resulting targets as well as of the targets needed to use the given
   * build configuration provider.
   */
  @Override
  public LoadingResult execute(
      EventHandler eventHandler,
      EventBus eventBus,
      List<String> targetPatterns,
      PathFragment relativeWorkingDirectory,
      LoadingOptions options,
      ListMultimap<String, Label> labelsToLoadUnconditionally,
      boolean keepGoing,
      boolean determineTests,
      @Nullable LoadingCallback callback)
      throws TargetParsingException, LoadingFailedException, InterruptedException {
    LOG.info("Starting pattern evaluation");
    Stopwatch timer = Stopwatch.createStarted();
    // The two options are mutually exclusive; fail fast before any parsing work.
    if (options.buildTestsOnly && options.compileOneDependency) {
      throw new LoadingFailedException("--compile_one_dependency cannot be used together with "
          + "the --build_tests_only option or the 'bazel test' command ");
    }
    targetPatternEvaluator.updateOffset(relativeWorkingDirectory);
    EventHandler parseFailureListener = new ParseFailureListenerImpl(eventHandler, eventBus);
    // Determine targets to build:
    ResolvedTargets<Target> targets = getTargetsToBuild(parseFailureListener,
        targetPatterns, options.compileOneDependency, keepGoing);
    ImmutableSet<Target> filteredTargets = targets.getFilteredTargets();
    boolean buildTestsOnly = options.buildTestsOnly;
    // null means "tests were not determined"; an empty set is a valid result.
    ImmutableSet<Target> testsToRun = null;
    ImmutableSet<Target> testFilteredTargets = ImmutableSet.of();

    // Now we have a list of targets to build. If the --build_tests_only option was specified or we
    // want to run tests, we need to determine the list of targets to test. For that, we remove
    // manual tests and apply the command line filters. Also, if --build_tests_only is specified,
    // then the list of filtered targets will be set as build list as well.
    if (determineTests || buildTestsOnly) {
      // Parse the targets to get the tests.
      ResolvedTargets<Target> testTargets = determineTests(parseFailureListener,
          targetPatterns, options, keepGoing);
      if (testTargets.getTargets().isEmpty() && !testTargets.getFilteredTargets().isEmpty()) {
        eventHandler.handle(Event.warn("All specified test targets were excluded by filters"));
      }

      if (buildTestsOnly) {
        // Replace original targets to build with test targets, so that only targets that are
        // actually going to be built are loaded in the loading phase. Note that this has a side
        // effect that any test_suite target requested to be built is replaced by the set of *_test
        // targets it represents; for example, this affects the status and the summary reports.
        Set<Target> allFilteredTargets = new HashSet<>();
        allFilteredTargets.addAll(targets.getTargets());
        allFilteredTargets.addAll(targets.getFilteredTargets());
        allFilteredTargets.removeAll(testTargets.getTargets());
        allFilteredTargets.addAll(testTargets.getFilteredTargets());
        testFilteredTargets = ImmutableSet.copyOf(allFilteredTargets);
        filteredTargets = ImmutableSet.of();

        targets = ResolvedTargets.<Target>builder()
            .merge(testTargets)
            .mergeError(targets.hasError())
            .build();
        if (determineTests) {
          testsToRun = testTargets.getTargets();
        }
      } else /*if (determineTests)*/ {
        testsToRun = testTargets.getTargets();
        targets = ResolvedTargets.<Target>builder()
            .merge(targets)
            // Avoid merge() here which would remove the filteredTargets from the targets.
            .addAll(testsToRun)
            .mergeError(testTargets.hasError())
            .build();
        // filteredTargets is correct in this case - it cannot contain tests that got back in
        // through test_suite expansion, because the test determination would also filter those out.
        // However, that's not obvious, and it might be better to explicitly recompute it.
      }

      if (testsToRun != null) {
        // Note that testsToRun can still be null here, if buildTestsOnly && !shouldRunTests.
        Preconditions.checkState(targets.getTargets().containsAll(testsToRun));
      }
    }

    // NOTE(review): unlike ParseFailureListenerImpl, the bus is used here without
    // a null check — callers presumably always pass a non-null bus; confirm.
    eventBus.post(new TargetParsingCompleteEvent(targets.getTargets(),
        filteredTargets, testFilteredTargets,
        timer.stop().elapsed(TimeUnit.MILLISECONDS)));

    if (targets.hasError()) {
      eventHandler.handle(Event.warn("Target pattern parsing failed. Continuing anyway"));
    }
    if (callback != null) {
      callback.notifyTargets(targets.getTargets());
    }
    LoadingPhaseRunner.maybeReportDeprecation(eventHandler, targets.getTargets());
    return doSimpleLoadingPhase(eventHandler, eventBus, targets, testsToRun, keepGoing);
  }

  /**
   * Perform test_suite expansion and emits necessary events and logging messages for legacy
   * support.
   */
  private LoadingResult doSimpleLoadingPhase(
      EventHandler eventHandler,
      EventBus eventBus,
      ResolvedTargets<Target> targets,
      ImmutableSet<Target> testsToRun,
      boolean keepGoing)
      throws InterruptedException, LoadingFailedException {
    Stopwatch timer = preLoadingLogging(eventHandler);
    ImmutableSet<Target> targetsToLoad = targets.getTargets();
    ResolvedTargets<Target> expandedResult;
    try {
      expandedResult = expandTestSuites(eventHandler, targetsToLoad, keepGoing);
    } catch (TargetParsingException e) {
      // This is reached when keepGoing is false and test suite expansion fails.
      throw new LoadingFailedException("Loading failed; build aborted", e);
    }
    postLoadingLogging(eventBus, targetsToLoad, expandedResult.getTargets(), timer);
    return new LoadingResult(targets.hasError(), expandedResult.hasError(),
        expandedResult.getTargets(), testsToRun, getWorkspaceName(eventHandler));
  }

  /** Emits the pre-loading progress event/log line and starts the loading timer. */
  private Stopwatch preLoadingLogging(EventHandler eventHandler) {
    eventHandler.handle(Event.progress("Loading..."));
    LOG.info("Starting loading phase");
    return Stopwatch.createStarted();
  }

  /** Posts the loading-complete event (including the expanded test_suite targets) and stops the timer. */
  private void postLoadingLogging(EventBus eventBus, ImmutableSet<Target> originalTargetsToLoad,
      ImmutableSet<Target> expandedTargetsToLoad, Stopwatch timer) {
    // Targets present before expansion but absent afterwards are the test_suite targets.
    Set<Target> testSuiteTargets = Sets.difference(originalTargetsToLoad, expandedTargetsToLoad);
    eventBus.post(new LoadingPhaseCompleteEvent(
        expandedTargetsToLoad, ImmutableSet.copyOf(testSuiteTargets),
        packageManager.getStatistics(), timer.stop().elapsed(TimeUnit.MILLISECONDS)));
    LOG.info("Loading phase finished");
  }

  /**
   * Expands test_suite targets into the tests they contain.
   *
   * @throws LoadingFailedException if expansion reported an error and !keepGoing
   */
  private ResolvedTargets<Target> expandTestSuites(EventHandler eventHandler,
      ImmutableSet<Target> targets, boolean keepGoing)
      throws LoadingFailedException, TargetParsingException {
    // We use strict test_suite expansion here to match the analysis-time checks.
    ResolvedTargets<Target> expandedResult = TestTargetUtils.expandTestSuites(
        packageManager, eventHandler, targets, /*strict=*/true, /*keepGoing=*/true);
    // keepGoing is honored here rather than inside the expansion call above.
    if (expandedResult.hasError() && !keepGoing) {
      throw new LoadingFailedException("Could not expand test suite target");
    }
    return expandedResult;
  }

  /**
   * Interpret the command-line arguments.
   *
   * @param targetPatterns the list of command-line target patterns specified by the user
   * @param compileOneDependency if true, enables alternative interpretation of targetPatterns; see
   *     {@link LoadingOptions#compileOneDependency}
   * @throws TargetParsingException if parsing failed and !keepGoing
   */
  private ResolvedTargets<Target> getTargetsToBuild(EventHandler eventHandler,
      List<String> targetPatterns, boolean compileOneDependency,
      boolean keepGoing) throws TargetParsingException, InterruptedException {
    ResolvedTargets<Target> result =
        targetPatternEvaluator.parseTargetPatternList(eventHandler, targetPatterns,
            FilteringPolicies.FILTER_MANUAL, keepGoing);
    if (compileOneDependency) {
      return new CompileOneDependencyTransformer(packageManager)
          .transformCompileOneDependency(eventHandler, result);
    }
    return result;
  }

  /**
   * Interpret test target labels from the command-line arguments and return the corresponding set
   * of targets, handling the filter flags, and expanding test suites.
   *
   * @param eventHandler the error event eventHandler
   * @param targetPatterns the list of command-line target patterns specified by the user
   * @param options the loading phase options
   * @param keepGoing value of the --keep_going flag
   */
  private ResolvedTargets<Target> determineTests(EventHandler eventHandler,
      List<String> targetPatterns, LoadingOptions options, boolean keepGoing)
      throws TargetParsingException, InterruptedException {
    // Parse the targets to get the tests.
    ResolvedTargets<Target> testTargetsBuilder = targetPatternEvaluator.parseTargetPatternList(
        eventHandler, targetPatterns, FilteringPolicies.FILTER_TESTS, keepGoing);

    ResolvedTargets.Builder<Target> finalBuilder = ResolvedTargets.builder();
    finalBuilder.merge(testTargetsBuilder);
    // Apply the command-line test filters (size, tag, language, etc.).
    finalBuilder.filter(TestFilter.forOptions(options, eventHandler, ruleNames));
    return finalBuilder.build();
  }

  /**
   * Returns the workspace name from the //external package.
   *
   * @throws LoadingFailedException if the //external package cannot be loaded
   */
  private String getWorkspaceName(EventHandler eventHandler)
      throws InterruptedException, LoadingFailedException {
    try {
      return packageManager.getPackage(eventHandler, Label.EXTERNAL_PACKAGE_IDENTIFIER)
          .getWorkspaceName();
    } catch (NoSuchPackageException e) {
      throw new LoadingFailedException("Failed to load //external package", e);
    }
  }
}
| |
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package leap.lang.io;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Factory and combinator methods for {@link FileFilter2} instances: file/directory,
 * readability/writability, hidden/visible, name and suffix matching, and the
 * boolean combinators {@code and}, {@code or}, {@code not}.
 *
 * <p>All stateless filters are shared singletons; the factory methods return them directly.
 */
public class FileFilters {

  private static final FileFilter2 FILE = new FileFilter();
  private static final FileFilter2 DIR = new DirectoryFileFilter();
  private static final FileFilter2 HIDDEN = new HiddenFilter();
  private static final FileFilter2 VISIBLE = new NotFilter(HIDDEN);
  private static final FileFilter2 CAN_READ = new CanReadFilter();
  // Reuse the shared singletons instead of allocating duplicate filter instances.
  private static final FileFilter2 CANNOT_READ = new NotFilter(CAN_READ);
  private static final FileFilter2 CAN_WRITE = new CanWriteFilter();
  private static final FileFilter2 CANNOT_WRITE = new NotFilter(CAN_WRITE);
  private static final FileFilter2 READONLY = new AndFilter(CAN_READ, CANNOT_WRITE);

  // Not instantiable as a utility class, but protected (not private) so the
  // class may still be subclassed for namespacing.
  protected FileFilters() {
  }

  /** Returns a filter accepting regular files ({@link File#isFile()}). */
  public static FileFilter2 isFile() {
    return FILE;
  }

  /** Returns a filter accepting directories ({@link File#isDirectory()}). */
  public static FileFilter2 isDirectory() {
    return DIR;
  }

  /** Returns a filter accepting files that are readable but not writable. */
  public static FileFilter2 isReadonly() {
    return READONLY;
  }

  /** Returns a filter accepting readable files. */
  public static FileFilter2 canRead() {
    return CAN_READ;
  }

  /** Returns a filter accepting unreadable files. */
  public static FileFilter2 canNotRead() {
    return CANNOT_READ;
  }

  /** Returns a filter accepting writable files. */
  public static FileFilter2 canWrite() {
    return CAN_WRITE;
  }

  /** Returns a filter accepting non-writable files. */
  public static FileFilter2 canNotWrite() {
    return CANNOT_WRITE;
  }

  /** Returns a case-sensitive filter accepting names ending with any of the given suffixes. */
  public static FileFilter2 endsWith(String... suffixes) {
    return new EndsWithFilter(suffixes);
  }

  /** Returns a case-sensitive filter accepting names equal to any of the given names. */
  public static FileFilter2 nameEquals(String... names) {
    return new NameEqualsFilter(names);
  }

  /** Returns a filter accepting hidden files ({@link File#isHidden()}). */
  public static FileFilter2 isHidden() {
    return HIDDEN;
  }

  /** Returns a filter accepting non-hidden files. */
  public static FileFilter2 isVisible() {
    return VISIBLE;
  }

  /** Returns a filter accepting only files accepted by every given filter (empty input accepts nothing). */
  public static FileFilter2 and(FileFilter2... filters) {
    return new AndFilter(filters);
  }

  /** Returns a filter accepting files accepted by at least one given filter. */
  public static FileFilter2 or(FileFilter2... filters) {
    return new OrFilter(filters);
  }

  /** Returns a filter accepting exactly the files the given filter rejects. */
  public static FileFilter2 not(FileFilter2 filter) {
    return new NotFilter(filter);
  }

  /** Accepts readable files. */
  static class CanReadFilter extends AbstractFileFilter {
    @Override
    public boolean accept(File file) {
      return file.canRead();
    }
  }

  /** Accepts writable files. */
  static class CanWriteFilter extends AbstractFileFilter {
    @Override
    public boolean accept(File file) {
      return file.canWrite();
    }
  }

  /** Accepts hidden files. */
  static class HiddenFilter extends AbstractFileFilter {
    @Override
    public boolean accept(File file) {
      return file.isHidden();
    }
  }

  /** Accepts directories. */
  static class DirectoryFileFilter extends AbstractFileFilter {
    @Override
    public boolean accept(File file) {
      return file.isDirectory();
    }
  }

  /** Accepts files whose name equals one of a fixed set of names, with configurable case sensitivity. */
  static class NameEqualsFilter extends AbstractFileFilter {
    private final String[] names;
    private final IOCase caseSensitivity;

    public NameEqualsFilter(String name) {
      this(name, null);
    }

    public NameEqualsFilter(String name, IOCase caseSensitivity) {
      if (name == null) {
        throw new IllegalArgumentException("The name must not be null");
      }
      this.names = new String[] {name};
      // null means "use the default" (case sensitive).
      this.caseSensitivity = caseSensitivity == null ? IOCase.SENSITIVE : caseSensitivity;
    }

    public NameEqualsFilter(String... names) {
      this(names, null);
    }

    public NameEqualsFilter(String[] names, IOCase caseSensitivity) {
      if (names == null) {
        throw new IllegalArgumentException("The array of names must not be null");
      }
      // Defensive copy: later changes to the caller's array must not affect this filter.
      this.names = new String[names.length];
      System.arraycopy(names, 0, this.names, 0, names.length);
      this.caseSensitivity = caseSensitivity == null ? IOCase.SENSITIVE : caseSensitivity;
    }

    @Override
    public boolean accept(File file) {
      String name = file.getName();
      for (String name2 : this.names) {
        if (caseSensitivity.checkEquals(name, name2)) {
          return true;
        }
      }
      return false;
    }

    @Override
    public boolean accept(File dir, String name) {
      for (String name2 : names) {
        if (caseSensitivity.checkEquals(name, name2)) {
          return true;
        }
      }
      return false;
    }

    @Override
    public String toString() {
      StringBuilder buffer = new StringBuilder();
      buffer.append(super.toString());
      buffer.append("(");
      if (names != null) {
        for (int i = 0; i < names.length; i++) {
          if (i > 0) {
            buffer.append(",");
          }
          buffer.append(names[i]);
        }
      }
      buffer.append(")");
      return buffer.toString();
    }
  }

  /** Logical negation of a wrapped filter. */
  static class NotFilter extends AbstractFileFilter {
    private final FileFilter2 filter;

    public NotFilter(FileFilter2 filter) {
      if (filter == null) {
        throw new IllegalArgumentException("The filter must not be null");
      }
      this.filter = filter;
    }

    @Override
    public boolean accept(File file) {
      return !filter.accept(file);
    }

    @Override
    public boolean accept(File file, String name) {
      return !filter.accept(file, name);
    }

    @Override
    public String toString() {
      return super.toString() + "(" + filter.toString() + ")";
    }
  }

  /** Logical AND of a list of filters; an empty filter list accepts nothing. */
  static class AndFilter extends AbstractFileFilter {
    private final List<FileFilter2> filters;

    public AndFilter() {
      this.filters = new ArrayList<FileFilter2>();
    }

    public AndFilter(final List<FileFilter2> filters) {
      if (filters == null) {
        this.filters = new ArrayList<FileFilter2>();
      } else {
        this.filters = new ArrayList<FileFilter2>(filters);
      }
    }

    public AndFilter(final FileFilter2... filters) {
      if (filters == null) {
        this.filters = new ArrayList<FileFilter2>();
      } else {
        // Defensive copy: Arrays.asList alone would be a fixed-size view backed
        // by the caller's array, so later mutation of the array would silently
        // change this filter (and the list constructor above already copies).
        this.filters = new ArrayList<FileFilter2>(Arrays.asList(filters));
      }
    }

    @Override
    public boolean accept(final File file) {
      // An empty conjunction deliberately accepts nothing (matches commons-io behavior).
      if (this.filters.isEmpty()) {
        return false;
      }
      for (FileFilter2 fileFilter : filters) {
        if (!fileFilter.accept(file)) {
          return false;
        }
      }
      return true;
    }

    @Override
    public boolean accept(final File file, final String name) {
      if (this.filters.isEmpty()) {
        return false;
      }
      for (FileFilter2 fileFilter : filters) {
        if (!fileFilter.accept(file, name)) {
          return false;
        }
      }
      return true;
    }

    @Override
    public String toString() {
      StringBuilder buffer = new StringBuilder();
      buffer.append(super.toString());
      buffer.append("(");
      if (filters != null) {
        for (int i = 0; i < filters.size(); i++) {
          if (i > 0) {
            buffer.append(",");
          }
          Object filter = filters.get(i);
          buffer.append(filter == null ? "null" : filter.toString());
        }
      }
      buffer.append(")");
      return buffer.toString();
    }
  }

  /** Logical OR of a list of filters; an empty filter list accepts nothing. */
  static class OrFilter extends AbstractFileFilter {
    private final List<FileFilter2> filters;

    public OrFilter() {
      this.filters = new ArrayList<FileFilter2>();
    }

    public OrFilter(final List<FileFilter2> fileFilters) {
      if (fileFilters == null) {
        this.filters = new ArrayList<FileFilter2>();
      } else {
        this.filters = new ArrayList<FileFilter2>(fileFilters);
      }
    }

    public OrFilter(final FileFilter2... filters) {
      if (filters == null) {
        this.filters = new ArrayList<FileFilter2>();
      } else {
        // Defensive copy for the same reason as AndFilter above.
        this.filters = new ArrayList<FileFilter2>(Arrays.asList(filters));
      }
    }

    @Override
    public boolean accept(final File file) {
      for (FileFilter2 fileFilter : filters) {
        if (fileFilter.accept(file)) {
          return true;
        }
      }
      return false;
    }

    @Override
    public boolean accept(final File file, final String name) {
      for (FileFilter2 fileFilter : filters) {
        if (fileFilter.accept(file, name)) {
          return true;
        }
      }
      return false;
    }

    @Override
    public String toString() {
      StringBuilder buffer = new StringBuilder();
      buffer.append(super.toString());
      buffer.append("(");
      if (filters != null) {
        for (int i = 0; i < filters.size(); i++) {
          if (i > 0) {
            buffer.append(",");
          }
          Object filter = filters.get(i);
          buffer.append(filter == null ? "null" : filter.toString());
        }
      }
      buffer.append(")");
      return buffer.toString();
    }
  }

  /** Accepts regular files. (Name shadows {@code java.io.FileFilter}; kept for compatibility.) */
  static class FileFilter extends AbstractFileFilter {
    @Override
    public boolean accept(File file) {
      return file.isFile();
    }
  }

  /** Accepts files whose name ends with one of a fixed set of suffixes, with configurable case sensitivity. */
  static class EndsWithFilter extends AbstractFileFilter {
    private final String[] suffixes;
    private final IOCase caseSensitivity;

    public EndsWithFilter(String suffix) {
      this(suffix, IOCase.SENSITIVE);
    }

    public EndsWithFilter(String suffix, IOCase caseSensitivity) {
      if (suffix == null) {
        throw new IllegalArgumentException("The suffix must not be null");
      }
      this.suffixes = new String[] {suffix};
      this.caseSensitivity = caseSensitivity == null ? IOCase.SENSITIVE : caseSensitivity;
    }

    public EndsWithFilter(String[] suffixes) {
      this(suffixes, IOCase.SENSITIVE);
    }

    public EndsWithFilter(String[] suffixes, IOCase caseSensitivity) {
      if (suffixes == null) {
        throw new IllegalArgumentException("The array of suffixes must not be null");
      }
      // Defensive copy, as in NameEqualsFilter.
      this.suffixes = new String[suffixes.length];
      System.arraycopy(suffixes, 0, this.suffixes, 0, suffixes.length);
      this.caseSensitivity = caseSensitivity == null ? IOCase.SENSITIVE : caseSensitivity;
    }

    @Override
    public boolean accept(File file) {
      String name = file.getName();
      for (String suffix : this.suffixes) {
        if (caseSensitivity.checkEndsWith(name, suffix)) {
          return true;
        }
      }
      return false;
    }

    @Override
    public boolean accept(File file, String name) {
      for (String suffix : this.suffixes) {
        if (caseSensitivity.checkEndsWith(name, suffix)) {
          return true;
        }
      }
      return false;
    }

    @Override
    public String toString() {
      StringBuilder buffer = new StringBuilder();
      buffer.append(super.toString());
      buffer.append("(");
      if (suffixes != null) {
        for (int i = 0; i < suffixes.length; i++) {
          if (i > 0) {
            buffer.append(",");
          }
          buffer.append(suffixes[i]);
        }
      }
      buffer.append(")");
      return buffer.toString();
    }
  }

  /**
   * Convenience base class: each {@code accept} overload delegates to the other,
   * so a subclass need only override one of them. A subclass that overrides
   * NEITHER overload will recurse infinitely (StackOverflowError) — every
   * concrete filter in this file overrides at least one.
   */
  static abstract class AbstractFileFilter implements FileFilter2 {
    public boolean accept(File file) {
      return accept(file.getParentFile(), file.getName());
    }

    public boolean accept(File dir, String name) {
      return accept(new File(dir, name));
    }

    @Override
    public String toString() {
      // Default textual form is just the filter's class name.
      return getClass().getSimpleName();
    }
  }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.psi.formatter.java;
import com.intellij.JavaTestUtil;
import com.intellij.application.options.CodeStyle;
import com.intellij.codeInsight.actions.ReformatCodeProcessor;
import com.intellij.formatting.FormatterTestUtils.Action;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.impl.DocumentImpl;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.roots.LanguageLevelProjectExtension;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.*;
import com.intellij.testFramework.LightIdeaTestCase;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.text.LineReader;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.util.List;
import static com.intellij.formatting.FormatterTestUtils.ACTIONS;
import static com.intellij.formatting.FormatterTestUtils.Action.REFORMAT;
/**
* Base class for java formatter tests that holds utility methods.
*
* @author Denis Zhdanov
*/
public abstract class AbstractJavaFormatterTest extends LightIdeaTestCase {

  // Lazily created wrapper around the project code style settings; see getCodeStyleBean().
  private JavaCodeStyleBean myCodeStyleBean;

  /**
   * Prepends {@code i} spaces to every line of {@code initial} and returns the result.
   * Lines are split by reading the string back through a {@link LineReader}
   * (UTF-8 round trip), so the original line separators are normalized to '\n'.
   *
   * @param initial         the text to indent
   * @param i               number of spaces to prepend to each line
   * @param shiftEmptyLines if false, empty lines are left without indentation
   */
  @NotNull
  public static String shiftIndentInside(@NotNull String initial, final int i, boolean shiftEmptyLines) {
    StringBuilder result = new StringBuilder(initial.length());
    List<byte[]> lines;
    try {
      LineReader reader = new LineReader(new ByteArrayInputStream(initial.getBytes(CharsetToolkit.UTF8_CHARSET)));
      lines = reader.readLines();
    }
    catch (IOException e) {
      // Reading from an in-memory stream should not fail; surface as unchecked.
      throw new RuntimeException(e);
    }
    boolean first = true;
    for (byte[] line : lines) {
      try {
        if (!first) result.append('\n');
        if (line.length > 0 || shiftEmptyLines) {
          StringUtil.repeatSymbol(result, ' ', i);
        }
        result.append(new String(line, CharsetToolkit.UTF8_CHARSET));
      }
      finally {
        // Cleared in a finally block so the separator logic stays correct
        // even if appending throws.
        first = false;
      }
    }
    return result.toString();
  }

  /**
   * Returns the (lazily created) code style bean bound to this project's settings.
   */
  @NotNull
  public JavaCodeStyleBean getCodeStyleBean() {
    if (myCodeStyleBean == null) {
      myCodeStyleBean = new JavaCodeStyleBean();
      myCodeStyleBean.setRootSettings(CodeStyle.getSettings(getProject()));
    }
    return myCodeStyleBean;
  }

  /** Returns the Java-specific custom code style settings for the current project. */
  public static JavaCodeStyleSettings getJavaSettings() {
    return getSettings().getRootSettings().getCustomSettings(JavaCodeStyleSettings.class);
  }

  // Root of the formatter test data: <java test data>/psi/formatter/java
  private static final String BASE_PATH = JavaTestUtil.getJavaTestDataPath() + "/psi/formatter/java";

  // Optional restriction of the formatting operation. If myLineRange is set,
  // myTextRange is recomputed from it in replaceAndProcessDocument(); if neither
  // is set, the whole file is processed.
  public TextRange myTextRange;
  public TextRange myLineRange;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Tests run against the highest supported language level.
    LanguageLevelProjectExtension.getInstance(getProject()).setLanguageLevel(LanguageLevel.HIGHEST);
  }

  /** Returns the common (language-independent) code style settings for Java. */
  public static CommonCodeStyleSettings getSettings() {
    CodeStyleSettings rootSettings = CodeStyle.getSettings(getProject());
    return rootSettings.getCommonSettings(JavaLanguage.INSTANCE);
  }

  /** Returns the indent options for Java files. */
  public static CommonCodeStyleSettings.IndentOptions getIndentOptions() {
    return getSettings().getRootSettings().getIndentOptions(StdFileTypes.JAVA);
  }

  /**
   * Runs a file-based test using the naming convention
   * {@code <TestName>.java} (before) and {@code <TestName>_after.java} (expected).
   */
  public void doTest() {
    doTest(getTestName(false) + ".java", getTestName(false) + "_after.java");
  }

  /** Reformats the contents of {@code fileNameBefore} and compares with {@code fileNameAfter}. */
  public void doTest(@NotNull String fileNameBefore, @NotNull String fileNameAfter) {
    doTextTest(REFORMAT, loadFile(fileNameBefore), loadFile(fileNameAfter));
  }

  /**
   * Same as {@link #doTextTest(String, String)} but with indent-detection enabled
   * for the duration of the test; the provider is always reset afterwards.
   */
  public void doTestWithDetectableIndentOptions(@NotNull String text, @NotNull String textAfter) {
    DetectableIndentOptionsProvider provider = DetectableIndentOptionsProvider.getInstance();
    assertNotNull("DetectableIndentOptionsProvider not found", provider);
    provider.setEnabledInTest(true);
    try {
      doTextTest(text, textAfter);
    }
    finally {
      provider.setEnabledInTest(false);
    }
  }

  /** Reformats {@code text} and asserts the result equals {@code textAfter}. */
  public void doTextTest(@NotNull String text, @NotNull String textAfter) throws IncorrectOperationException {
    doTextTest(REFORMAT, text, textAfter);
  }

  /**
   * Applies the given formatting {@code action} to {@code text} (in a fresh "A.java" file)
   * and asserts the result equals {@code textAfter}, checking both the document text and,
   * after committing, the PSI file text.
   */
  public void doTextTest(@NotNull Action action, @NotNull String text, @NotNull String textAfter) throws IncorrectOperationException {
    final PsiFile file = createFile("A.java", text);
    final PsiDocumentManager manager = PsiDocumentManager.getInstance(getProject());
    final Document document = manager.getDocument(file);
    if (document == null) {
      fail("Document is null");
      return;
    }
    replaceAndProcessDocument(action, text, file, document);
    assertEquals(textAfter, document.getText());
    // Commit the document so the PSI reflects the formatted text, then verify it too.
    manager.commitDocument(document);
    assertEquals(textAfter, file.getText());
  }

  /**
   * Reformats each of the given inputs in turn and asserts that all of them
   * produce the same output as the first one (formatting is convergent).
   */
  public void formatEveryoneAndCheckIfResultEqual(@NotNull final String...before) {
    assert before.length > 1;
    final PsiFile file = createFile("A.java", "");
    final PsiDocumentManager manager = PsiDocumentManager.getInstance(getProject());
    final Document document = manager.getDocument(file);
    String afterFirst = replaceAndProcessDocument(REFORMAT, before[0], file, document);
    for (String nextBefore: before) {
      assertEquals(afterFirst, replaceAndProcessDocument(REFORMAT, nextBefore, file, document));
    }
  }

  /**
   * Replaces the whole document with {@code text}, runs the given formatting
   * {@code action} over {@link #myTextRange} (or the whole file when unset),
   * and returns the resulting document text. If {@link #myLineRange} is set,
   * {@code myTextRange} is first recomputed from the line numbers.
   * Everything runs inside a write action wrapped in a command.
   */
  @NotNull
  private String replaceAndProcessDocument(@NotNull final Action action,
                                           @NotNull final String text,
                                           @NotNull final PsiFile file,
                                           @Nullable final Document document) throws IncorrectOperationException
  {
    if (document == null) {
      fail("Don't expect the document to be null");
      return null;
    }
    if (myLineRange != null) {
      // myLineRange holds line numbers; convert them to character offsets.
      final DocumentImpl doc = new DocumentImpl(text);
      myTextRange =
        new TextRange(doc.getLineStartOffset(myLineRange.getStartOffset()), doc.getLineEndOffset(myLineRange.getEndOffset()));
    }
    final PsiDocumentManager manager = PsiDocumentManager.getInstance(getProject());
    CommandProcessor.getInstance().executeCommand(getProject(), () -> ApplicationManager.getApplication().runWriteAction(() -> {
      document.replaceString(0, document.getTextLength(), text);
      // Commit before formatting so the PSI matches the new document text.
      manager.commitDocument(document);
      try {
        TextRange rangeToUse = myTextRange;
        if (rangeToUse == null) {
          rangeToUse = file.getTextRange();
        }
        ACTIONS.get(action).run(file, rangeToUse.getStartOffset(), rangeToUse.getEndOffset());
      }
      catch (IncorrectOperationException e) {
        assertTrue(e.getLocalizedMessage(), false);
      }
    }), action == REFORMAT ? ReformatCodeProcessor.COMMAND_NAME : "", "");
    return document.getText();
  }

  /**
   * Wraps {@code before}/{@code after} in a {@code void foo()} method inside a class
   * and runs a reformat test; the expected body is re-indented by 8 spaces to match.
   */
  public void doMethodTest(@NotNull String before, @NotNull String after) {
    doTextTest(
      REFORMAT,
      "class Foo{\n" + "    void foo() {\n" + before + '\n' + "    }\n" + "}",
      "class Foo {\n" + "    void foo() {\n" + shiftIndentInside(after, 8, false) + '\n' + "    }\n" + "}"
    );
  }

  /**
   * Wraps {@code before}/{@code after} in a class body and runs a reformat test;
   * the expected body is re-indented by 4 spaces to match.
   */
  public void doClassTest(@NotNull String before, @NotNull String after) {
    doTextTest(
      REFORMAT,
      "class Foo{\n" + before + '\n' + "}",
      "class Foo {\n" + shiftIndentInside(after, 4, false) + '\n' + "}"
    );
  }

  /**
   * Loads a test data file from {@link #BASE_PATH}, normalizing line separators to '\n'.
   */
  protected static String loadFile(String name) {
    String fullName = BASE_PATH + File.separatorChar + name;
    try {
      String text = FileUtil.loadFile(new File(fullName));
      return StringUtil.convertLineSeparators(text);
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
| |
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.jbcsrc;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.base.Optional;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Ints;
import com.google.template.soy.data.SanitizedContent.ContentKind;
import com.google.template.soy.data.SoyList;
import com.google.template.soy.data.SoyRecord;
import com.google.template.soy.data.SoyValue;
import com.google.template.soy.data.SoyValueProvider;
import com.google.template.soy.jbcsrc.Expression.Feature;
import com.google.template.soy.jbcsrc.Expression.Features;
import com.google.template.soy.jbcsrc.api.AdvisingAppendable;
import com.google.template.soy.jbcsrc.api.AdvisingStringBuilder;
import com.google.template.soy.jbcsrc.api.RenderResult;
import com.google.template.soy.jbcsrc.shared.CompiledTemplate;
import com.google.template.soy.jbcsrc.shared.RenderContext;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Label;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.commons.Method;
import org.objectweb.asm.util.Printer;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
/**
* A set of utilities for generating simple expressions in bytecode
*/
final class BytecodeUtils {
// Frequently used ASM Type and Method descriptors, computed once and shared. Each Type.getType
// call parses a descriptor, so caching these as constants avoids repeated work at compile time.
static final TypeInfo OBJECT = TypeInfo.create(Object.class);
static final Type STRING_TYPE = Type.getType(String.class);
static final Type ARRAY_LIST_TYPE = Type.getType(ArrayList.class);
static final Type ADVISING_APPENDABLE_TYPE = Type.getType(AdvisingAppendable.class);
static final Type ADVISING_BUILDER_TYPE = Type.getType(AdvisingStringBuilder.class);
static final Type RENDER_RESULT_TYPE = Type.getType(RenderResult.class);
static final Type NULL_POINTER_EXCEPTION_TYPE = Type.getType(NullPointerException.class);
static final Type RENDER_CONTEXT_TYPE = Type.getType(RenderContext.class);
static final Type SOY_RECORD_TYPE = Type.getType(SoyRecord.class);
static final Type LINKED_HASH_MAP_TYPE = Type.getType(LinkedHashMap.class);
static final Type SOY_VALUE_TYPE = Type.getType(SoyValue.class);
static final Type SOY_VALUE_PROVIDER_TYPE = Type.getType(SoyValueProvider.class);
static final Type THROWABLE_TYPE = Type.getType(Throwable.class);
static final Type SOY_LIST_TYPE = Type.getType(SoyList.class);
static final Type CONTENT_KIND_TYPE = Type.getType(ContentKind.class);
static final Type COMPILED_TEMPLATE_TYPE = Type.getType(CompiledTemplate.class);
// Method descriptor for the default constructor "<init>()V".
static final Method NULLARY_INIT = Method.getMethod("void <init>()");
// Method descriptor for the static initializer "<clinit>()V".
static final Method CLASS_INIT = Method.getMethod("void <clinit>()");
// Maps an ASM Type to the corresponding runtime Class, when one is loadable on the compiler's
// classpath. Optional.absent() marks types that could not be resolved (typically classes that
// this compiler is itself generating). Results are cached because Class.forName is expensive
// and the same types are looked up repeatedly during compilation.
private static final LoadingCache<Type, Optional<Class<?>>> objectTypeToClassCache =
CacheBuilder.newBuilder()
.build(new CacheLoader<Type, Optional<Class<?>>>() {
@Override public Optional<Class<?>> load(Type key) throws Exception {
switch (key.getSort()) {
case Type.ARRAY:
// Recurse on the element type; an array class exists iff its element class does.
Optional<Class<?>> elementType =
objectTypeToClassCache.getUnchecked(key.getElementType());
if (elementType.isPresent()) {
// The easiest way to generically get an array class.
return Optional.<Class<?>>of(Array.newInstance(elementType.get(), 0).getClass());
}
return Optional.absent();
case Type.VOID:
return Optional.<Class<?>>of(void.class);
case Type.BOOLEAN:
return Optional.<Class<?>>of(boolean.class);
case Type.BYTE:
return Optional.<Class<?>>of(byte.class);
case Type.CHAR:
return Optional.<Class<?>>of(char.class);
case Type.DOUBLE:
return Optional.<Class<?>>of(double.class);
case Type.INT:
return Optional.<Class<?>>of(int.class);
case Type.SHORT:
return Optional.<Class<?>>of(short.class);
case Type.LONG:
return Optional.<Class<?>>of(long.class);
case Type.FLOAT:
return Optional.<Class<?>>of(float.class);
case Type.OBJECT:
// initialize=false: we only need the Class object, not its static initializers.
try {
return Optional.<Class<?>>of(
Class.forName(key.getClassName(), false, BytecodeUtils.class.getClassLoader()));
} catch (ClassNotFoundException e) {
// Not on the classpath -- presumably a class generated by this compiler.
return Optional.absent();
}
default:
throw new IllegalArgumentException("unsupported type: " + key);
}
}
});
// Static utility class; not instantiable.
private BytecodeUtils() {}
/**
 * Returns {@code true} if {@code left} is possibly assignable from {@code right}.
 *
 * <p>When either type cannot be resolved we fail open (assume assignable) and let the bytecode
 * verifier make the final call.
 */
static boolean isPossiblyAssignableFrom(Type left, Type right) {
  boolean failOpen = true;
  return doIsAssignableFrom(left, right, failOpen);
}
/**
 * Returns {@code true} if {@code left} is definitely assignable from {@code right}.
 *
 * <p>When either type cannot be resolved we fail closed (assume not assignable).
 */
static boolean isDefinitelyAssignableFrom(Type left, Type right) {
  boolean failOpen = false;
  return doIsAssignableFrom(left, right, failOpen);
}
/**
 * Checks if {@code left} is assignable from {@code right}, however if we don't have information
 * about one of the types then this returns {@code failOpen}.
 *
 * @param left the target type
 * @param right the source type
 * @param failOpen the value to return when either type cannot be resolved to a runtime class
 */
private static boolean doIsAssignableFrom(Type left, Type right, boolean failOpen) {
  if (left.equals(right)) {
    return true;
  }
  if (left.getSort() != right.getSort()) {
    return false;
  }
  if (left.getSort() != Type.OBJECT) {
    return false; // all other sorts require exact equality (even arrays)
  }
  // for object types we really need to know type hierarchy information to test for whether
  // right is assignable to left.
  Optional<Class<?>> leftClass = objectTypeToClassCache.getUnchecked(left);
  Optional<Class<?>> rightClass = objectTypeToClassCache.getUnchecked(right);
  // BUGFIX: the second operand previously lacked the negation ('rightClass.isPresent()'),
  // which made every resolvable right-hand type short-circuit to failOpen without ever running
  // the real isAssignableFrom check, and let a present-left/absent-right pair fall through to
  // rightClass.get(), throwing IllegalStateException.
  if (!leftClass.isPresent() || !rightClass.isPresent()) {
    // This means one of the types being compared is a generated object. So we can't easily check
    // it. Just delegate responsibility to the verifier.
    return failOpen;
  }
  return leftClass.get().isAssignableFrom(rightClass.get());
}
/**
 * Returns the runtime class represented by the given type.
 *
 * @throws IllegalArgumentException if the class cannot be found. It is expected that this
 *     method will only be called for types that have a runtime on the compilers classpath.
 */
static Class<?> classFromAsmType(Type type) {
  Optional<Class<?>> loaded = objectTypeToClassCache.getUnchecked(type);
  if (loaded.isPresent()) {
    return loaded.get();
  }
  throw new IllegalArgumentException("Could not load: " + type);
}
// Shared singleton expression that pushes the boolean constant 'false'.
private static final Expression FALSE =
new Expression(Type.BOOLEAN_TYPE, Feature.CHEAP) {
@Override
void doGen(CodeBuilder mv) {
mv.pushBoolean(false);
}
};
// Shared singleton expression that pushes the boolean constant 'true'.
private static final Expression TRUE =
new Expression(Type.BOOLEAN_TYPE, Feature.CHEAP) {
@Override
void doGen(CodeBuilder mv) {
mv.pushBoolean(true);
}
};
/** Returns an {@link Expression} that can load the given 'boolean' constant. */
static Expression constant(boolean value) {
  // Booleans are interned: reuse the two shared singletons.
  if (value) {
    return TRUE;
  }
  return FALSE;
}
/** Returns an {@link Expression} that can load the given 'int' constant. */
static Expression constant(final int value) {
  return new Expression(Type.INT_TYPE, Feature.CHEAP) {
    @Override
    void doGen(CodeBuilder cb) {
      cb.pushInt(value);
    }
  };
}
/** Returns an {@link Expression} that can load the given 'char' constant. */
static Expression constant(final char value) {
  return new Expression(Type.CHAR_TYPE, Feature.CHEAP) {
    @Override
    void doGen(CodeBuilder cb) {
      // chars are ints on the JVM operand stack, so pushInt is correct here.
      cb.pushInt(value);
    }
  };
}
/** Returns an {@link Expression} that can load the given long constant. */
static Expression constant(final long value) {
  return new Expression(Type.LONG_TYPE, Feature.CHEAP) {
    @Override
    void doGen(CodeBuilder cb) {
      cb.pushLong(value);
    }
  };
}
/** Returns an {@link Expression} that can load the given double constant. */
static Expression constant(final double value) {
  return new Expression(Type.DOUBLE_TYPE, Feature.CHEAP) {
    @Override
    void doGen(CodeBuilder cb) {
      cb.pushDouble(value);
    }
  };
}
/** Returns an {@link Expression} that can load the given String constant. */
static Expression constant(final String value) {
  checkNotNull(value);
  // A constant-pool string is never null, hence NON_NULLABLE.
  return new Expression(STRING_TYPE, Feature.CHEAP, Feature.NON_NULLABLE) {
    @Override
    void doGen(CodeBuilder cb) {
      cb.pushString(value);
    }
  };
}
/** Returns an {@link Expression} with the given type that always returns null. */
static Expression constantNull(Type type) {
  int sort = type.getSort();
  // Only reference types (objects and arrays) can hold null.
  checkArgument(
      sort == Type.OBJECT || sort == Type.ARRAY,
      "%s is not a reference type",
      type);
  return new Expression(type, Feature.CHEAP) {
    @Override
    void doGen(CodeBuilder cb) {
      cb.visitInsn(Opcodes.ACONST_NULL);
    }
  };
}
/**
 * Returns an expression that does a numeric conversion cast from the given expression to the
 * given type.
 *
 * @throws IllegalArgumentException if either the expression or the target type is not a numeric
 *     primitive
 */
static Expression numericConversion(final Expression expr, final Type to) {
  final Type from = expr.resultType();
  if (to.equals(from)) {
    // Already the requested type; no cast instruction needed.
    return expr;
  }
  if (!isNumericPrimitive(to) || !isNumericPrimitive(from)) {
    throw new IllegalArgumentException("Cannot convert from " + from + " to " + to);
  }
  return new Expression(to, expr.features()) {
    @Override
    void doGen(CodeBuilder cb) {
      expr.gen(cb);
      cb.cast(from, to);
    }
  };
}
/**
 * Returns {@code true} for the numeric primitive sorts (byte, char, short, int, long, float,
 * double); {@code false} for boolean, void, and reference/method types.
 */
private static boolean isNumericPrimitive(Type type) {
  switch (type.getSort()) {
    case Type.BYTE:
    case Type.CHAR:
    case Type.SHORT:
    case Type.INT:
    case Type.LONG:
    case Type.FLOAT:
    case Type.DOUBLE:
      return true;
    case Type.OBJECT:
    case Type.ARRAY:
    case Type.VOID:
    case Type.METHOD:
    case Type.BOOLEAN:
      return false;
    default:
      throw new AssertionError("unexpected type " + type);
  }
}
/**
 * Returns {@code true} if {@code type} is a primitive value type.
 *
 * @throws IllegalArgumentException for void and method types, which are neither primitive
 *     values nor references
 */
static boolean isPrimitive(Type type) {
  switch (type.getSort()) {
    case Type.BOOLEAN:
    case Type.BYTE:
    case Type.CHAR:
    case Type.SHORT:
    case Type.INT:
    case Type.LONG:
    case Type.FLOAT:
    case Type.DOUBLE:
      return true;
    case Type.OBJECT:
    case Type.ARRAY:
      return false;
    case Type.VOID:
    case Type.METHOD:
      throw new IllegalArgumentException("Invalid type: " + type);
    default:
      throw new AssertionError("unexpected type " + type);
  }
}
/**
* Generates a default nullary public constructor for the given type on the {@link ClassVisitor}.
*
* <p>For java classes this is normally generated by the compiler and looks like: <pre>{@code
* public Foo() {
* super();
* }}</pre>
*/
static void defineDefaultConstructor(ClassVisitor cv, TypeInfo ownerType) {
CodeBuilder mg = new CodeBuilder(Opcodes.ACC_PUBLIC, NULLARY_INIT, null, cv);
mg.visitCode();
Label start = mg.mark();
Label end = mg.newLabel();
LocalVariable thisVar = LocalVariable.createThisVar(ownerType, start, end);
thisVar.gen(mg);
mg.invokeConstructor(OBJECT.type(), NULLARY_INIT);
mg.returnValue();
mg.mark(end);
thisVar.tableEntry(mg);
mg.endMethod();
}
// TODO(lukes): some of these branch operators are a little too branchy. For example, the
// expression a == b || a == c, could be implemented by
// logicalOr(compare(Opcodes.IFEQ, a, b), compare(Opcodes.IFEQ, a, c)), but that is not optimal
// instead we could allow compare to take an expression for what to do when the comparison fails
// that way we could save a branch. Maybe these operators are a failed abstraction?
/**
 * Compares the two primitive valued expressions using the provided comparison operation.
 *
 * <p>The result is a boolean-typed expression that pushes 1 when the comparison holds and 0
 * otherwise.
 */
static Expression compare(final int comparisonOpcode, final Expression left,
    final Expression right) {
  checkArgument(left.resultType().equals(right.resultType()),
      "left and right must have matching types, found %s and %s", left.resultType(),
      right.resultType());
  checkIntComparisonOpcode(left.resultType(), comparisonOpcode);
  // The comparison is only CHEAP when both operands are.
  Features features =
      Expression.areAllCheap(left, right) ? Features.of(Feature.CHEAP) : Features.of();
  return new Expression(Type.BOOLEAN_TYPE, features) {
    @Override
    void doGen(CodeBuilder cb) {
      left.gen(cb);
      right.gen(cb);
      Label onTrue = cb.newLabel();
      Label done = cb.newLabel();
      cb.ifCmp(left.resultType(), comparisonOpcode, onTrue);
      cb.pushBoolean(false);
      cb.goTo(done);
      cb.mark(onTrue);
      cb.pushBoolean(true);
      cb.mark(done);
    }
  };
}
/**
 * Validates that {@code opcode} is a comparison supported for {@code comparisonType}: equality
 * opcodes are allowed for all types, ordering opcodes only for primitives.
 */
private static void checkIntComparisonOpcode(Type comparisonType, int opcode) {
  if (opcode == Opcodes.IFEQ || opcode == Opcodes.IFNE) {
    // Equality comparisons are valid for every type.
    return;
  }
  if (opcode == Opcodes.IFGT
      || opcode == Opcodes.IFGE
      || opcode == Opcodes.IFLT
      || opcode == Opcodes.IFLE) {
    int sort = comparisonType.getSort();
    if (sort == Type.ARRAY || sort == Type.OBJECT) {
      // References have no ordering.
      throw new IllegalArgumentException(
          "Type: " + comparisonType + " cannot be compared via " + Printer.OPCODES[opcode]);
    }
    return;
  }
  throw new IllegalArgumentException("Unsupported opcode for comparison operation: " + opcode);
}
/**
 * Returns an expression that evaluates to the logical negation of the given boolean valued
 * expression.
 */
static Expression logicalNot(final Expression baseExpr) {
  baseExpr.checkAssignableTo(Type.BOOLEAN_TYPE);
  checkArgument(baseExpr.resultType().equals(Type.BOOLEAN_TYPE), "not a boolean expression");
  return new Expression(Type.BOOLEAN_TYPE, baseExpr.features()) {
    @Override
    void doGen(CodeBuilder cb) {
      baseExpr.gen(cb);
      // Surprisingly, java bytecode uses a branch (instead of 'xor 1' or something) to implement
      // this. This is most likely useful for allowing true to be represented by any non-zero
      // number.
      Label nonZero = cb.newLabel();
      Label done = cb.newLabel();
      cb.ifZCmp(Opcodes.IFNE, nonZero); // branch when the operand is non-zero (true)
      cb.pushBoolean(true);
      cb.goTo(done);
      cb.mark(nonZero);
      cb.pushBoolean(false);
      cb.mark(done);
    }
  };
}
/**
 * Compares two {@link SoyExpression}s for equality using soy == semantics.
 *
 * <p>Dispatch order matters: string handling must be tested first because soy's == coerces
 * strings against numbers; then unboxed primitive fast paths; finally the boxed runtime helper.
 */
static Expression compareSoyEquals(final SoyExpression left, final SoyExpression right) {
// We can special case when we know the types.
// If either is a string, we run special logic so test for that first
// otherwise we special case primitives and eventually fall back to our runtime.
if (left.isKnownString()) {
return doEqualsString(left.unboxAs(String.class), right);
}
if (right.isKnownString()) {
// == is symmetric, so swapping the operands for the string helper is safe.
return doEqualsString(right.unboxAs(String.class), left);
}
if (left.isKnownInt() && right.isKnownInt()) {
// Both ints: compare as unboxed longs.
return compare(Opcodes.IFEQ, left.unboxAs(long.class), right.unboxAs(long.class));
}
if (left.isKnownNumber() && right.isKnownNumber()
&& (left.isKnownFloat() || right.isKnownFloat())) {
// Mixed numeric comparison: promote both sides to double.
return compare(Opcodes.IFEQ, left.coerceToDouble(), right.coerceToDouble());
}
// Unknown types: box both operands and defer to the runtime equality helper.
return MethodRef.RUNTIME_EQUAL.invoke(left.box(), right.box());
}
/**
 * Compare a string valued expression to another expression using soy == semantics.
 *
 * @param stringExpr An expression that is known to be an unboxed string
 * @param other An expression to compare it to.
 */
private static Expression doEqualsString(SoyExpression stringExpr, SoyExpression other) {
// This is compatible with SharedRuntime.compareString, which interestingly makes == break
// transitivity. See b/21461181
if (other.isKnownStringOrSanitizedContent()) {
// Both sides are string-like: plain String.equals.
return stringExpr.invoke(MethodRef.EQUALS, other.unboxAs(String.class));
}
if (other.isKnownNumber()) {
// in this case, we actually try to convert stringExpr to a number
return MethodRef.RUNTIME_STRING_EQUALS_AS_NUMBER.invoke(stringExpr, other.coerceToDouble());
}
// We don't know what other is, assume the worst and call out to our boxed implementation for
// string comparisons.
return MethodRef.RUNTIME_COMPARE_STRING.invoke(stringExpr, other.box());
}
/**
 * Returns an expression that evaluates to {@code left} if left is non null, and evaluates to
 * {@code right} otherwise.
 */
static Expression firstNonNull(final Expression left, final Expression right) {
  checkArgument(left.resultType().getSort() == Type.OBJECT);
  checkArgument(right.resultType().getSort() == Type.OBJECT);
  Features features = Features.of();
  if (Expression.areAllCheap(left, right)) {
    features = features.plus(Feature.CHEAP);
  }
  if (right.isNonNullable()) {
    // If the fallback can never be null, neither can the whole expression.
    features = features.plus(Feature.NON_NULLABLE);
  }
  return new Expression(left.resultType(), features) {
    @Override
    void doGen(CodeBuilder cb) {
      Label useLeft = new Label();
      left.gen(cb);            // Stack: L
      cb.dup();                // Stack: L, L
      cb.ifNonNull(useLeft);   // Stack: L  (jumps when L != null)
      cb.pop();                // Stack:    (discard the null left)
      right.gen(cb);           // Stack: R
      cb.mark(useLeft);        // Stack holds either L or R here
    }
  };
}
/**
 * Returns an expression that evaluates equivalently to a java ternary expression:
 * {@code condition ? left : right}
 */
static Expression ternary(final Expression condition,
    final Expression trueBranch,
    final Expression falseBranch) {
  checkArgument(condition.resultType().equals(Type.BOOLEAN_TYPE));
  // Branches only need matching sorts (both refs or both the same primitive kind).
  checkArgument(trueBranch.resultType().getSort() == falseBranch.resultType().getSort());
  Features features = Features.of();
  if (Expression.areAllCheap(condition, trueBranch, falseBranch)) {
    features = features.plus(Feature.CHEAP);
  }
  if (trueBranch.isNonNullable() && falseBranch.isNonNullable()) {
    features = features.plus(Feature.NON_NULLABLE);
  }
  return new Expression(trueBranch.resultType(), features) {
    @Override
    void doGen(CodeBuilder cb) {
      Label elseLabel = new Label();
      Label done = new Label();
      condition.gen(cb);
      cb.ifZCmp(Opcodes.IFEQ, elseLabel); // 0 (false) -> take the else branch
      trueBranch.gen(cb);
      cb.goTo(done);
      cb.mark(elseLabel);
      falseBranch.gen(cb);
      cb.mark(done);
    }
  };
}
/**
 * Implements the short circuiting logical or ({@code ||}) operator over the list of boolean
 * expressions.
 */
static Expression logicalOr(Expression... expressions) {
  ImmutableList<Expression> exprs = ImmutableList.copyOf(expressions);
  return logicalOr(exprs);
}
/**
 * Implements the short circuiting logical or ({@code ||}) operator over the list of boolean
 * expressions.
 */
static Expression logicalOr(List<? extends Expression> expressions) {
  ImmutableList<? extends Expression> exprs = ImmutableList.copyOf(expressions);
  boolean isOrOperator = true;
  return doShortCircuitingLogicalOperator(exprs, isOrOperator);
}
/**
 * Implements the short circuiting logical and ({@code &&}) operator over the list of boolean
 * expressions.
 */
static Expression logicalAnd(Expression... expressions) {
  ImmutableList<Expression> exprs = ImmutableList.copyOf(expressions);
  return logicalAnd(exprs);
}
/**
 * Implements the short circuiting logical and ({@code &&}) operator over the list of boolean
 * expressions.
 */
static Expression logicalAnd(List<? extends Expression> expressions) {
  ImmutableList<? extends Expression> exprs = ImmutableList.copyOf(expressions);
  boolean isOrOperator = false;
  return doShortCircuitingLogicalOperator(exprs, isOrOperator);
}
/**
 * Generates short-circuit evaluation for a chain of boolean expressions.
 *
 * <p>For {@code ||} (isOrOperator == true) we jump to the short-circuit label as soon as an
 * operand is true; for {@code &&} as soon as one is false. The last operand is special: its
 * value alone decides the result when no earlier operand short-circuited.
 */
private static Expression doShortCircuitingLogicalOperator(
final ImmutableList<? extends Expression> expressions, final boolean isOrOperator) {
checkArgument(!expressions.isEmpty());
for (Expression expr : expressions) {
expr.checkAssignableTo(Type.BOOLEAN_TYPE);
}
if (expressions.size() == 1) {
// A single operand needs no branching at all.
return expressions.get(0);
}
return new Expression(Type.BOOLEAN_TYPE,
Expression.areAllCheap(expressions)
? Features.of(Feature.CHEAP)
: Features.of()) {
@Override void doGen(CodeBuilder adapter) {
Label end = new Label();
Label shortCircuit = new Label();
for (int i = 0; i < expressions.size(); i++) {
Expression expr = expressions.get(i);
expr.gen(adapter);
if (i == expressions.size() - 1) {
// if we are the last one, just goto end. Whatever the result of the last expression is
// determines the result of the whole expression (when all prior tests fail).
adapter.goTo(end);
} else {
// ||: bail out on the first true; &&: bail out on the first false.
adapter.ifZCmp(isOrOperator ? Opcodes.IFNE : Opcodes.IFEQ, shortCircuit);
}
}
adapter.mark(shortCircuit);
adapter.pushBoolean(isOrOperator); // default for || is true && is false
adapter.mark(end);
}
};
}
/**
 * Returns an expression that returns a new {@link ArrayList} containing all the given items.
 */
static Expression asList(Iterable<? extends Expression> items) {
  final ImmutableList<Expression> itemList = ImmutableList.copyOf(items);
  if (itemList.isEmpty()) {
    return MethodRef.IMMUTABLE_LIST_OF.invoke();
  }
  // Note, we cannot neccesarily use ImmutableList for anything besides the empty list because
  // we may need to put a null in it.
  final Expression newList = ConstructorRef.ARRAY_LIST_SIZE.construct(constant(itemList.size()));
  return new Expression(ARRAY_LIST_TYPE, Feature.NON_NULLABLE) {
    @Override
    void doGen(CodeBuilder cb) {
      newList.gen(cb);
      for (Expression item : itemList) {
        cb.dup();
        item.gen(cb);
        MethodRef.ARRAY_LIST_ADD.invokeUnchecked(cb);
        cb.pop(); // discard the boolean result of List.add
      }
    }
  };
}
/**
 * Returns an expression that returns a new {@link LinkedHashMap} containing all the given
 * entries.
 *
 * <p>Keys and values must be reference-typed expressions and the two lists must be the same
 * length; entry i maps keys[i] to values[i], preserving insertion order.
 */
static Expression newLinkedHashMap(
Iterable<? extends Expression> keys,
Iterable<? extends Expression> values) {
final ImmutableList<Expression> keysCopy = ImmutableList.copyOf(keys);
final ImmutableList<Expression> valuesCopy = ImmutableList.copyOf(values);
checkArgument(keysCopy.size() == valuesCopy.size());
for (int i = 0; i < keysCopy.size(); i++) {
// Map.put takes Objects, so both sides must be reference types.
checkArgument(keysCopy.get(i).resultType().getSort() == Type.OBJECT);
checkArgument(valuesCopy.get(i).resultType().getSort() == Type.OBJECT);
}
// Presize the map so no rehashing occurs while filling it.
final Expression construct = ConstructorRef.LINKED_HASH_MAP_SIZE
.construct(constant(hashMapCapacity(keysCopy.size())));
return new Expression(LINKED_HASH_MAP_TYPE, Feature.NON_NULLABLE) {
@Override
void doGen(CodeBuilder mv) {
construct.gen(mv);
for (int i = 0; i < keysCopy.size(); i++) {
Expression key = keysCopy.get(i);
Expression value = valuesCopy.get(i);
mv.dup();
key.gen(mv);
value.gen(mv);
MethodRef.LINKED_HASH_MAP_PUT.invokeUnchecked(mv);
mv.pop(); // pop the Object result of map.put
}
}
};
}
/**
 * Returns an initial {@link LinkedHashMap} capacity that can hold {@code expectedSize} entries
 * without resizing (mirrors Guava's Maps.capacity calculation).
 */
private static int hashMapCapacity(int expectedSize) {
  if (expectedSize < 3) {
    return expectedSize + 1;
  }
  if (expectedSize >= Ints.MAX_POWER_OF_TWO) {
    return Integer.MAX_VALUE; // any large value
  }
  // This is the calculation used in JDK8 to resize when a putAll
  // happens; it seems to be the most conservative calculation we
  // can make. 0.75 is the default load factor.
  return (int) (expectedSize / 0.75F + 1.0F);
}
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing.allocation;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.junit.Test;
import static org.elasticsearch.cluster.ClusterState.newClusterStateBuilder;
import static org.elasticsearch.cluster.metadata.IndexMetaData.newIndexMetaDataBuilder;
import static org.elasticsearch.cluster.metadata.MetaData.newMetaDataBuilder;
import static org.elasticsearch.cluster.node.DiscoveryNodes.newNodesBuilder;
import static org.elasticsearch.cluster.routing.RoutingBuilders.routingTable;
import static org.elasticsearch.cluster.routing.ShardRoutingState.*;
import static org.elasticsearch.cluster.routing.allocation.RoutingAllocationTests.newNode;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
/**
*
*/
public class IndexBalanceTests {
// Per-test logger used to mark the phases of each allocation scenario.
private final ESLogger logger = Loggers.getLogger(IndexBalanceTests.class);
/**
 * Verifies that two 3-shard/1-replica indices allocated onto three nodes that are all present
 * from the start end up evenly balanced: 4 started shards per node, 2 from each index.
 */
@Test
public void testBalanceAllNodesStarted() {
AllocationService strategy = new AllocationService(settingsBuilder()
.put("cluster.routing.allocation.node_concurrent_recoveries", 10)
.put("cluster.routing.allocation.node_initial_primaries_recoveries", 10)
.put("cluster.routing.allocation.allow_rebalance", "always")
.put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build());
logger.info("Building initial routing table");
MetaData metaData = newMetaDataBuilder().put(newIndexMetaDataBuilder("test").numberOfShards(3).numberOfReplicas(1))
.put(newIndexMetaDataBuilder("test1").numberOfShards(3).numberOfReplicas(1)).build();
RoutingTable routingTable = routingTable().addAsNew(metaData.index("test")).addAsNew(metaData.index("test1")).build();
ClusterState clusterState = newClusterStateBuilder().metaData(metaData).routingTable(routingTable).build();
// With no nodes yet, every primary and replica of both indices is UNASSIGNED.
assertThat(routingTable.index("test").shards().size(), equalTo(3));
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).shards().get(0).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(i).shards().get(1).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(i).shards().get(0).currentNodeId(), nullValue());
assertThat(routingTable.index("test").shard(i).shards().get(1).currentNodeId(), nullValue());
}
assertThat(routingTable.index("test1").shards().size(), equalTo(3));
for (int i = 0; i < routingTable.index("test1").shards().size(); i++) {
assertThat(routingTable.index("test1").shard(i).size(), equalTo(2));
assertThat(routingTable.index("test1").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test1").shard(i).shards().get(0).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test1").shard(i).shards().get(1).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test1").shard(i).shards().get(0).currentNodeId(), nullValue());
assertThat(routingTable.index("test1").shard(i).shards().get(1).currentNodeId(), nullValue());
}
logger.info("Adding three node and performing rerouting");
clusterState = newClusterStateBuilder().state(clusterState)
.nodes(newNodesBuilder().put(newNode("node1")).put(newNode("node2")).put(newNode("node3"))).build();
RoutingTable prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
// Reroute must have produced a new table: primaries start INITIALIZING, replicas stay UNASSIGNED.
assertThat(prevRoutingTable != routingTable, equalTo(true));
assertThat(routingTable.index("test").shards().size(), equalTo(3));
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(INITIALIZING));
assertThat(routingTable.index("test").shard(i).replicaShards().size(), equalTo(1));
assertThat(routingTable.index("test").shard(i).replicaShards().get(0).state(), equalTo(UNASSIGNED));
assertThat(routingTable.index("test").shard(i).replicaShards().get(0).currentNodeId(), nullValue());
}
logger.info("Another round of rebalancing");
clusterState = newClusterStateBuilder().state(clusterState).nodes(newNodesBuilder().putAll(clusterState.nodes())).build();
prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
// Nothing changed since the last reroute, so the same table instance is returned.
assertThat(prevRoutingTable == routingTable, equalTo(true));
RoutingNodes routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
// Mark the initializing primaries as started.
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
assertThat(prevRoutingTable != routingTable, equalTo(true));
assertThat(routingTable.index("test").shards().size(), equalTo(3));
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).replicaShards().size(), equalTo(1));
// backup shards are initializing as well, we make sure that they
// recover from primary *started* shards in the
// IndicesClusterStateService
assertThat(routingTable.index("test").shard(i).replicaShards().get(0).state(), equalTo(INITIALIZING));
}
logger.info("Reroute, nothing should change");
prevRoutingTable = routingTable;
routingTable = strategy.reroute(clusterState).routingTable();
assertThat(prevRoutingTable == routingTable, equalTo(true));
logger.info("Start the more shards");
routingNodes = clusterState.routingNodes();
prevRoutingTable = routingTable;
// Start the initializing replicas; both indices' shards become STARTED.
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
assertThat(prevRoutingTable != routingTable, equalTo(true));
assertThat(routingTable.index("test").shards().size(), equalTo(3));
for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test").shard(i).replicaShards().size(), equalTo(1));
assertThat(routingTable.index("test").shard(i).replicaShards().get(0).state(), equalTo(STARTED));
}
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
routingNodes = clusterState.routingNodes();
assertThat(prevRoutingTable != routingTable, equalTo(true));
assertThat(routingTable.index("test1").shards().size(), equalTo(3));
for (int i = 0; i < routingTable.index("test1").shards().size(); i++) {
assertThat(routingTable.index("test1").shard(i).size(), equalTo(2));
assertThat(routingTable.index("test1").shard(i).shards().size(), equalTo(2));
assertThat(routingTable.index("test1").shard(i).primaryShard().state(), equalTo(STARTED));
assertThat(routingTable.index("test1").shard(i).replicaShards().size(), equalTo(1));
assertThat(routingTable.index("test1").shard(i).replicaShards().get(0).state(), equalTo(STARTED));
}
// Final balance check: 12 shards spread as 4 per node, 2 per index per node.
assertThat(routingNodes.node("node1").numberOfShardsWithState(STARTED), equalTo(4));
assertThat(routingNodes.node("node2").numberOfShardsWithState(STARTED), equalTo(4));
assertThat(routingNodes.node("node3").numberOfShardsWithState(STARTED), equalTo(4));
assertThat(routingNodes.node("node1").shardsWithState("test", STARTED).size(), equalTo(2));
assertThat(routingNodes.node("node2").shardsWithState("test", STARTED).size(), equalTo(2));
assertThat(routingNodes.node("node3").shardsWithState("test", STARTED).size(), equalTo(2));
assertThat(routingNodes.node("node1").shardsWithState("test1", STARTED).size(), equalTo(2));
assertThat(routingNodes.node("node2").shardsWithState("test1", STARTED).size(), equalTo(2));
assertThat(routingNodes.node("node3").shardsWithState("test1", STARTED).size(), equalTo(2));
}
@Test
public void testBalanceIncrementallyStartNodes() {
    // Rebalancing is allowed "always" and is effectively unthrottled (-1), so the
    // allocator is free to move shards as soon as capacity appears on a new node.
    AllocationService strategy = new AllocationService(settingsBuilder()
            .put("cluster.routing.allocation.node_concurrent_recoveries", 10)
            .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10)
            .put("cluster.routing.allocation.allow_rebalance", "always")
            .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build());
    logger.info("Building initial routing table");
    // Two indices, 3 shards each with 1 replica: 12 shard copies in total.
    MetaData metaData = newMetaDataBuilder().put(newIndexMetaDataBuilder("test").numberOfShards(3).numberOfReplicas(1))
            .put(newIndexMetaDataBuilder("test1").numberOfShards(3).numberOfReplicas(1)).build();
    RoutingTable routingTable = routingTable().addAsNew(metaData.index("test")).addAsNew(metaData.index("test1")).build();
    ClusterState clusterState = newClusterStateBuilder().metaData(metaData).routingTable(routingTable).build();
    // With no nodes in the cluster every shard copy must be unassigned.
    assertThat(routingTable.index("test").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
        assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).shards().get(0).state(), equalTo(UNASSIGNED));
        assertThat(routingTable.index("test").shard(i).shards().get(1).state(), equalTo(UNASSIGNED));
        assertThat(routingTable.index("test").shard(i).shards().get(0).currentNodeId(), nullValue());
        assertThat(routingTable.index("test").shard(i).shards().get(1).currentNodeId(), nullValue());
    }
    assertThat(routingTable.index("test1").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test1").shards().size(); i++) {
        assertThat(routingTable.index("test1").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test1").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test1").shard(i).shards().get(0).state(), equalTo(UNASSIGNED));
        assertThat(routingTable.index("test1").shard(i).shards().get(1).state(), equalTo(UNASSIGNED));
        assertThat(routingTable.index("test1").shard(i).shards().get(0).currentNodeId(), nullValue());
        assertThat(routingTable.index("test1").shard(i).shards().get(1).currentNodeId(), nullValue());
    }
    logger.info("Adding one node and performing rerouting");
    clusterState = newClusterStateBuilder().state(clusterState).nodes(newNodesBuilder().put(newNode("node1"))).build();
    RoutingTable prevRoutingTable = routingTable;
    routingTable = strategy.reroute(clusterState).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    // All primaries start initializing on the single node; replicas stay unassigned
    // because they cannot be allocated to the same node as their primary.
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
        assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(INITIALIZING));
        assertThat(routingTable.index("test").shard(i).replicaShards().size(), equalTo(1));
        assertThat(routingTable.index("test").shard(i).replicaShards().get(0).state(), equalTo(UNASSIGNED));
        assertThat(routingTable.index("test").shard(i).replicaShards().get(0).currentNodeId(), nullValue());
    }
    logger.info("Add another node and perform rerouting, nothing will happen since primary not started");
    clusterState = newClusterStateBuilder().state(clusterState)
            .nodes(newNodesBuilder().putAll(clusterState.nodes()).put(newNode("node2"))).build();
    prevRoutingTable = routingTable;
    routingTable = strategy.reroute(clusterState).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    // Replicas only recover from *started* primaries, so the routing table is untouched.
    assertThat(prevRoutingTable == routingTable, equalTo(true));
    logger.info("Start the primary shard");
    RoutingNodes routingNodes = clusterState.routingNodes();
    prevRoutingTable = routingTable;
    routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
        assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
        assertThat(routingTable.index("test").shard(i).replicaShards().size(), equalTo(1));
        // backup shards are initializing as well, we make sure that they
        // recover from primary *started* shards in the
        // IndicesClusterStateService
        assertThat(routingTable.index("test").shard(i).replicaShards().get(0).state(), equalTo(INITIALIZING));
    }
    logger.info("Reroute, nothing should change");
    prevRoutingTable = routingTable;
    routingTable = strategy.reroute(clusterState).routingTable();
    assertThat(prevRoutingTable == routingTable, equalTo(true));
    logger.info("Start the backup shard");
    routingNodes = clusterState.routingNodes();
    prevRoutingTable = routingTable;
    routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    routingNodes = clusterState.routingNodes();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
        assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
        assertThat(routingTable.index("test").shard(i).replicaShards().size(), equalTo(1));
        assertThat(routingTable.index("test").shard(i).replicaShards().get(0).state(), equalTo(STARTED));
    }
    // Second applyStartedShards pass starts whatever is still initializing (index "test1").
    // NOTE(review): prevRoutingTable is not refreshed before this call, so the inequality
    // below compares against the table from two steps back — it still holds, but is weaker
    // than the sibling assertions.
    routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    routingNodes = clusterState.routingNodes();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test1").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test1").shards().size(); i++) {
        assertThat(routingTable.index("test1").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test1").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test1").shard(i).primaryShard().state(), equalTo(STARTED));
        assertThat(routingTable.index("test1").shard(i).replicaShards().size(), equalTo(1));
        assertThat(routingTable.index("test1").shard(i).replicaShards().get(0).state(), equalTo(STARTED));
    }
    // All 12 shards are now started on node1/node2; adding a third node triggers
    // rebalancing (the copy-pasted "nothing will happen" message was wrong — the
    // assertion below verifies the routing table DID change).
    logger.info("Add another node and perform rerouting, relocations should start to balance onto the new node");
    clusterState = newClusterStateBuilder().state(clusterState)
            .nodes(newNodesBuilder().putAll(clusterState.nodes()).put(newNode("node3"))).build();
    prevRoutingTable = routingTable;
    routingTable = strategy.reroute(clusterState).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    logger.info("Reroute, nothing should change");
    prevRoutingTable = routingTable;
    routingTable = strategy.reroute(clusterState).routingTable();
    assertThat(prevRoutingTable == routingTable, equalTo(true));
    logger.info("Start the shards that are initializing on the new node");
    routingNodes = clusterState.routingNodes();
    prevRoutingTable = routingTable;
    routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    routingNodes = clusterState.routingNodes();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test").shards().size(), equalTo(3));
    routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    routingNodes = clusterState.routingNodes();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test1").shards().size(), equalTo(3));
    // Final balanced state: 12 started shards spread evenly, 4 per node,
    // 2 per node for each of the two indices.
    assertThat(routingNodes.node("node1").numberOfShardsWithState(STARTED), equalTo(4));
    assertThat(routingNodes.node("node2").numberOfShardsWithState(STARTED), equalTo(4));
    assertThat(routingNodes.node("node3").numberOfShardsWithState(STARTED), equalTo(4));
    assertThat(routingNodes.node("node1").shardsWithState("test", STARTED).size(), equalTo(2));
    assertThat(routingNodes.node("node2").shardsWithState("test", STARTED).size(), equalTo(2));
    assertThat(routingNodes.node("node3").shardsWithState("test", STARTED).size(), equalTo(2));
    assertThat(routingNodes.node("node1").shardsWithState("test1", STARTED).size(), equalTo(2));
    assertThat(routingNodes.node("node2").shardsWithState("test1", STARTED).size(), equalTo(2));
    assertThat(routingNodes.node("node3").shardsWithState("test1", STARTED).size(), equalTo(2));
}
@Test
public void testBalanceAllNodesStartedAddIndex() {
    // Rebalancing is allowed "always" and is effectively unthrottled (-1), so a
    // newly added index can be spread across all nodes immediately.
    AllocationService strategy = new AllocationService(settingsBuilder()
            .put("cluster.routing.allocation.node_concurrent_recoveries", 10)
            .put("cluster.routing.allocation.node_initial_primaries_recoveries", 10)
            .put("cluster.routing.allocation.allow_rebalance", "always")
            .put("cluster.routing.allocation.cluster_concurrent_rebalance", -1).build());
    logger.info("Building initial routing table");
    // Start with a single index: 3 shards, 1 replica (6 shard copies).
    MetaData metaData = newMetaDataBuilder().put(newIndexMetaDataBuilder("test").numberOfShards(3).numberOfReplicas(1)).build();
    RoutingTable routingTable = routingTable().addAsNew(metaData.index("test")).build();
    ClusterState clusterState = newClusterStateBuilder().metaData(metaData).routingTable(routingTable).build();
    // With no nodes every shard copy must be unassigned.
    assertThat(routingTable.index("test").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
        assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).shards().get(0).state(), equalTo(UNASSIGNED));
        assertThat(routingTable.index("test").shard(i).shards().get(1).state(), equalTo(UNASSIGNED));
        assertThat(routingTable.index("test").shard(i).shards().get(0).currentNodeId(), nullValue());
        assertThat(routingTable.index("test").shard(i).shards().get(1).currentNodeId(), nullValue());
    }
    logger.info("Adding three node and performing rerouting");
    clusterState = newClusterStateBuilder().state(clusterState)
            .nodes(newNodesBuilder().put(newNode("node1")).put(newNode("node2")).put(newNode("node3"))).build();
    RoutingTable prevRoutingTable = routingTable;
    routingTable = strategy.reroute(clusterState).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    // Primaries start initializing; replicas wait for their primary to be started.
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
        assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(INITIALIZING));
        assertThat(routingTable.index("test").shard(i).replicaShards().size(), equalTo(1));
        assertThat(routingTable.index("test").shard(i).replicaShards().get(0).state(), equalTo(UNASSIGNED));
        assertThat(routingTable.index("test").shard(i).replicaShards().get(0).currentNodeId(), nullValue());
    }
    logger.info("Another round of rebalancing");
    clusterState = newClusterStateBuilder().state(clusterState).nodes(newNodesBuilder().putAll(clusterState.nodes())).build();
    prevRoutingTable = routingTable;
    routingTable = strategy.reroute(clusterState).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    assertThat(prevRoutingTable == routingTable, equalTo(true));
    RoutingNodes routingNodes = clusterState.routingNodes();
    prevRoutingTable = routingTable;
    routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
        assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
        assertThat(routingTable.index("test").shard(i).replicaShards().size(), equalTo(1));
        // backup shards are initializing as well, we make sure that they
        // recover from primary *started* shards in the
        // IndicesClusterStateService
        assertThat(routingTable.index("test").shard(i).replicaShards().get(0).state(), equalTo(INITIALIZING));
    }
    logger.info("Reroute, nothing should change");
    prevRoutingTable = routingTable;
    routingTable = strategy.reroute(clusterState).routingTable();
    assertThat(prevRoutingTable == routingTable, equalTo(true));
    logger.info("Start the more shards");
    routingNodes = clusterState.routingNodes();
    prevRoutingTable = routingTable;
    routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    routingNodes = clusterState.routingNodes();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test").shards().size(); i++) {
        assertThat(routingTable.index("test").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test").shard(i).primaryShard().state(), equalTo(STARTED));
        assertThat(routingTable.index("test").shard(i).replicaShards().size(), equalTo(1));
    }
    routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    routingNodes = clusterState.routingNodes();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    // Index "test" fully started: 2 shards per node on the three nodes.
    assertThat(routingNodes.node("node1").numberOfShardsWithState(STARTED), equalTo(2));
    assertThat(routingNodes.node("node2").numberOfShardsWithState(STARTED), equalTo(2));
    assertThat(routingNodes.node("node3").numberOfShardsWithState(STARTED), equalTo(2));
    assertThat(routingNodes.node("node1").shardsWithState("test", STARTED).size(), equalTo(2));
    assertThat(routingNodes.node("node2").shardsWithState("test", STARTED).size(), equalTo(2));
    assertThat(routingNodes.node("node3").shardsWithState("test", STARTED).size(), equalTo(2));
    logger.info("Add new index 3 shards 1 replica");
    // (A redundant 'prevRoutingTable = routingTable;' dead store was removed here —
    // the value was unconditionally overwritten below before any read.)
    metaData = newMetaDataBuilder().metaData(metaData)
            .put(newIndexMetaDataBuilder("test1").settings(ImmutableSettings.settingsBuilder()
                    .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 3)
                    .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
            ))
            .build();
    routingTable = routingTable().routingTable(routingTable)
            .addAsNew(metaData.index("test1"))
            .build();
    clusterState = newClusterStateBuilder().state(clusterState).metaData(metaData).routingTable(routingTable).build();
    assertThat(routingTable.index("test1").shards().size(), equalTo(3));
    prevRoutingTable = routingTable;
    routingTable = strategy.reroute(clusterState).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test1").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test1").shards().size(); i++) {
        assertThat(routingTable.index("test1").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test1").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test1").shard(i).primaryShard().state(), equalTo(INITIALIZING));
        assertThat(routingTable.index("test1").shard(i).replicaShards().size(), equalTo(1));
        assertThat(routingTable.index("test1").shard(i).replicaShards().get(0).state(), equalTo(UNASSIGNED));
        assertThat(routingTable.index("test1").shard(i).replicaShards().get(0).currentNodeId(), nullValue());
    }
    logger.info("Another round of rebalancing");
    clusterState = newClusterStateBuilder().state(clusterState).nodes(newNodesBuilder().putAll(clusterState.nodes())).build();
    prevRoutingTable = routingTable;
    routingTable = strategy.reroute(clusterState).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    assertThat(prevRoutingTable == routingTable, equalTo(true));
    routingNodes = clusterState.routingNodes();
    prevRoutingTable = routingTable;
    routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test1").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test1").shards().size(); i++) {
        assertThat(routingTable.index("test1").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test1").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test1").shard(i).primaryShard().state(), equalTo(STARTED));
        assertThat(routingTable.index("test1").shard(i).replicaShards().size(), equalTo(1));
        // backup shards are initializing as well, we make sure that they
        // recover from primary *started* shards in the
        // IndicesClusterStateService
        assertThat(routingTable.index("test1").shard(i).replicaShards().get(0).state(), equalTo(INITIALIZING));
    }
    logger.info("Reroute, nothing should change");
    prevRoutingTable = routingTable;
    routingTable = strategy.reroute(clusterState).routingTable();
    assertThat(prevRoutingTable == routingTable, equalTo(true));
    logger.info("Start the more shards");
    routingNodes = clusterState.routingNodes();
    prevRoutingTable = routingTable;
    routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    routingNodes = clusterState.routingNodes();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    assertThat(routingTable.index("test1").shards().size(), equalTo(3));
    for (int i = 0; i < routingTable.index("test1").shards().size(); i++) {
        assertThat(routingTable.index("test1").shard(i).size(), equalTo(2));
        assertThat(routingTable.index("test1").shard(i).shards().size(), equalTo(2));
        assertThat(routingTable.index("test1").shard(i).primaryShard().state(), equalTo(STARTED));
        assertThat(routingTable.index("test1").shard(i).replicaShards().size(), equalTo(1));
    }
    routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
    clusterState = newClusterStateBuilder().state(clusterState).routingTable(routingTable).build();
    routingNodes = clusterState.routingNodes();
    assertThat(prevRoutingTable != routingTable, equalTo(true));
    // Final state: both indices fully started and balanced — 4 shards per node,
    // 2 of which belong to the newly added index "test1".
    assertThat(routingNodes.node("node1").numberOfShardsWithState(STARTED), equalTo(4));
    assertThat(routingNodes.node("node2").numberOfShardsWithState(STARTED), equalTo(4));
    assertThat(routingNodes.node("node3").numberOfShardsWithState(STARTED), equalTo(4));
    assertThat(routingNodes.node("node1").shardsWithState("test1", STARTED).size(), equalTo(2));
    assertThat(routingNodes.node("node2").shardsWithState("test1", STARTED).size(), equalTo(2));
    assertThat(routingNodes.node("node3").shardsWithState("test1", STARTED).size(), equalTo(2));
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn.svnkit.lowLevel;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.text.StringUtil;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
/**
* Created with IntelliJ IDEA.
* User: Irina.Chernushina
* Date: 7/30/12
* Time: 6:23 PM
*
* SVNLogInputStream is not used, since it does not check available()
*
*/
/**
 * An {@link InputStream} wrapper that makes long SVN network reads cancelable: every read
 * first checks the current {@link ProgressIndicator} and throws {@link IOException} if the
 * user canceled. When the {@code svn.check.available} system property is set to {@code true},
 * blocking reads additionally poll {@link #available()} on the underlying (unwrapped) stream
 * so that the cancellation check keeps running instead of blocking indefinitely.
 *
 * <p>SVNLogInputStream is not used, since it does not check {@code available()}.</p>
 */
public class SVNStoppableInputStream extends InputStream {
    private static final Logger LOG = Logger.getInstance(SVNStoppableInputStream.class);
    // System property gating the available()-polling behavior. (Field name typo
    // "ourCheckAvalilable" fixed; the property string itself is unchanged.)
    private static final String ourCheckAvailable = "svn.check.available";
    // The innermost "real" stream, used only for available() checks.
    private final InputStream myOriginalIs;
    // The stream actually read from.
    private final InputStream myIn;
    // Once available() has reported data, we stop asking the original stream again.
    private boolean myAvailableChecked;
    private final boolean myCheckAvailable;

    public SVNStoppableInputStream(InputStream original, InputStream in) {
        final String property = System.getProperty(ourCheckAvailable);
        // Polling is opt-in: the property must be present, non-blank, and "true".
        myCheckAvailable = !StringUtil.isEmptyOrSpaces(property) && Boolean.parseBoolean(property);
        myOriginalIs = myCheckAvailable ? digOriginal(original) : original;
        myIn = in;
        myAvailableChecked = false;
    }

    /**
     * Unwraps known SVNKit delegate streams (by reflective field access, since the classes
     * are internal) to find a stream whose {@code available()} is meaningful. Falls back to
     * a proxy whose {@code available()} always reports data when unwrapping fails or the
     * terminal stream is a {@link BufferedInputStream} (whose available() only reflects the
     * buffer).
     */
    private InputStream digOriginal(InputStream original) {
        // because of many delegates in the chain possible
        InputStream current = original;
        try {
            while (true) {
                final String name = current.getClass().getName();
                if ("org.tmatesoft.svn.core.internal.io.dav.http.SpoolFile.SpoolInputStream".equals(name)) {
                    current = byName(current, "myCurrentInput");
                } else if ("org.tmatesoft.svn.core.internal.util.ChunkedInputStream".equals(name)) {
                    current = byName(current, "myInputStream");
                } else if ("org.tmatesoft.svn.core.internal.util.FixedSizeInputStream".equals(name)) {
                    current = byName(current, "mySource");
                } else if (current instanceof BufferedInputStream) {
                    return createReadingProxy(current);
                } else {
                    // maybe ok class, maybe some unknown proxy: only trust it if it
                    // declares its own available() override.
                    Method[] methods = current.getClass().getDeclaredMethods();
                    for (Method method : methods) {
                        if ("available".equals(method.getName())) {
                            return current;
                        }
                    }
                    return createReadingProxy(current);
                }
            }
        }
        catch (NoSuchFieldException | IllegalAccessException e) {
            LOG.info(e);
            return createReadingProxy(current);
        }
    }

    /**
     * Wraps {@code current} so that {@code available()} always returns 1, effectively
     * disabling the polling wait for streams we could not unwrap. All other operations
     * delegate unchanged.
     */
    private InputStream createReadingProxy(final InputStream current) {
        return new InputStream() {
            @Override
            public int read() throws IOException {
                return current.read();
            }

            @Override
            public int read(byte[] b) throws IOException {
                return current.read(b);
            }

            @Override
            public int read(byte[] b, int off, int len) throws IOException {
                return current.read(b, off, len);
            }

            @Override
            public long skip(long n) throws IOException {
                return current.skip(n);
            }

            @Override
            public void close() throws IOException {
                current.close();
            }

            @Override
            public void mark(int readlimit) {
                current.mark(readlimit);
            }

            @Override
            public void reset() throws IOException {
                current.reset();
            }

            @Override
            public boolean markSupported() {
                return current.markSupported();
            }

            @Override
            public int available() throws IOException {
                // Pretend data is always available so waitForAvailable() never spins here.
                return 1;
            }
        };
    }

    /** Reflectively extracts the {@link InputStream} stored in field {@code name} of {@code current}. */
    private InputStream byName(InputStream current, final String name) throws NoSuchFieldException, IllegalAccessException {
        final Field input = current.getClass().getDeclaredField(name);
        input.setAccessible(true);
        current = (InputStream) input.get(current);
        return current;
    }

    @Override
    public int read() throws IOException {
        waitForAvailable();
        return myIn.read();
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        waitForAvailable();
        return myIn.read(b, off, len);
    }

    @Override
    public long skip(long n) throws IOException {
        if (n <= 0) return 0;
        check();
        if (available() <= 0) return 0;
        return myIn.skip(n);
    }

    @Override
    public int available() throws IOException {
        check();
        // Ask the unwrapped stream only until it first reports data; after that, assume
        // the connection is live and always report 1 to avoid repeated probing.
        if (!myAvailableChecked) {
            int available = myOriginalIs.available();
            if (available > 0) {
                myAvailableChecked = true;
            }
            return available;
        }
        return 1;
    }

    @Override
    public void close() throws IOException {
        check();
        myIn.close();
    }

    @Override
    public synchronized void mark(int readlimit) {
        myIn.mark(readlimit);
    }

    @Override
    public synchronized void reset() throws IOException {
        check();
        myIn.reset();
    }

    @Override
    public boolean markSupported() {
        return myIn.markSupported();
    }

    /** Throws if the user canceled the surrounding progress task. */
    private void check() throws IOException {
        ProgressIndicator indicator = ProgressManager.getInstance().getProgressIndicator();
        if (indicator != null && indicator.isCanceled()) {
            throw new IOException("Read request canceled by user");
        }
    }

    /**
     * Polls {@link #available()} every 100 ms until data appears, re-checking cancellation
     * on each iteration. No-op unless the {@code svn.check.available} property enabled polling.
     */
    private void waitForAvailable() throws IOException {
        if (!myCheckAvailable) return;
        final Object lock = new Object();
        synchronized (lock) {
            while (available() <= 0) {
                check();
                try {
                    lock.wait(100);
                }
                catch (InterruptedException e) {
                    // NOTE(review): interrupt status is swallowed here; consider
                    // Thread.currentThread().interrupt() — left as-is to preserve behavior.
                }
            }
        }
    }
}
| |
/*
* Copyright (c) 2008, The Codehaus. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.codehaus.httpcache4j;
import static org.junit.Assert.*;
import org.junit.Test;
import java.time.LocalDateTime;
public class ConditionalsTest {
@Test
public void testIfMatch() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfMatch(Tag.parse("\"foo\"").get());
assertEquals(1, conditionals.getMatch().size());
conditionals = conditionals.addIfMatch(Tag.parse("\"bar\"").get());
assertEquals(2, conditionals.getMatch().size());
Header header = new Header(HeaderConstants.IF_MATCH, "\"foo\",\"bar\"");
assertEquals(header, conditionals.toHeaders().getFirstHeader(HeaderConstants.IF_MATCH).get());
}
@Test
public void testIfMatchDuplicate() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfMatch(Tag.parse("\"foo\"").get());
conditionals = conditionals.addIfMatch(Tag.parse("\"foo\"").get());
assertEquals(1, conditionals.getMatch().size());
}
@Test
public void testIfNoneMatchDuplicate() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfNoneMatch(Tag.parse("\"foo\"").get());
conditionals = conditionals.addIfNoneMatch(Tag.parse("\"foo\"").get());
assertEquals(1, conditionals.getNoneMatch().size());
}
@Test
public void testIfMatchWithNullTag() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfMatch(null);
assertEquals(1, conditionals.getMatch().size());
try {
conditionals.addIfMatch(Tag.parse("\"bar\"").get());
fail("Expected IllegalArgumentException");
}
catch (IllegalArgumentException e) {
//expected
}
assertEquals(1, conditionals.getMatch().size());
}
@Test
public void testIfNoneMatchWithNullTag() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfNoneMatch(null);
assertEquals(1, conditionals.getNoneMatch().size());
try {
conditionals.addIfNoneMatch(Tag.parse("\"bar\"").get());
fail("Expected IllegalArgumentException");
}
catch (IllegalArgumentException e) {
//expected
}
assertEquals(1, conditionals.getNoneMatch().size());
}
@Test
public void testIfMatchStar() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfMatch(Tag.parse("*").get());
assertEquals(1, conditionals.getMatch().size());
try {
conditionals.addIfMatch(Tag.parse("\"bar\"").get());
fail("Expected IllegalArgumentException");
}
catch (IllegalArgumentException expected) {
}
}
@Test
public void testIfNoneMatchStar() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfNoneMatch(Tag.parse("*").get());
assertEquals(1, conditionals.getNoneMatch().size());
try {
conditionals.addIfNoneMatch(Tag.parse("\"bar\"").get());
fail("Expected IllegalArgumentException");
}
catch (IllegalArgumentException expected) {
}
}
@Test
public void testIfNoneMatch() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfNoneMatch(Tag.parse("\"foo\"").get());
assertEquals(1, conditionals.getNoneMatch().size());
conditionals = conditionals.addIfNoneMatch(Tag.parse("\"bar\"").get());
assertEquals(2, conditionals.getNoneMatch().size());
Header header = new Header(HeaderConstants.IF_NONE_MATCH, "\"foo\",\"bar\"");
assertEquals(header, conditionals.toHeaders().getFirstHeader(HeaderConstants.IF_NONE_MATCH).get());
}
@Test
public void testIfNoneMatchAndIfMatch() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfNoneMatch(Tag.parse("\"foo\"").get());
assertEquals(1, conditionals.getNoneMatch().size());
try {
conditionals.addIfMatch(Tag.parse("\"bar\"").get());
fail("Expected IllegalArgumentException");
}
catch (IllegalArgumentException expected) {
}
}
@Test
public void testIfModifiedSince() {
Conditionals conditionals = new Conditionals();
LocalDateTime dateTime = LocalDateTime.now();
conditionals = conditionals.ifModifiedSince(dateTime);
Header header = HeaderUtils.toHttpDate(HeaderConstants.IF_MODIFIED_SINCE, dateTime);
assertEquals(header, conditionals.toHeaders().getFirstHeader(HeaderConstants.IF_MODIFIED_SINCE).get());
}
@Test
public void testIfUnmodifiedSince() {
Conditionals conditionals = new Conditionals();
LocalDateTime dateTime = LocalDateTime.now();
conditionals = conditionals.ifUnModifiedSince(dateTime);
Header header = HeaderUtils.toHttpDate(HeaderConstants.IF_UNMODIFIED_SINCE, dateTime);
assertEquals(header, conditionals.toHeaders().getFirstHeader(HeaderConstants.IF_UNMODIFIED_SINCE).get());
}
@Test
public void testIfModifiedSinceAndIfMatch() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfMatch(Tag.parse("\"bar\"").get());
assertEquals(1, conditionals.getMatch().size());
LocalDateTime dateTime = LocalDateTime.now();
try {
conditionals.ifModifiedSince(dateTime);
fail("Expected IllegalArgumentException");
}
catch (IllegalArgumentException expected) {
}
assertFalse(conditionals.getModifiedSince().isPresent());
}
@Test
public void testIfModifiedSinceAndIfNoneMatch() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfNoneMatch(Tag.parse("\"bar\"").get());
assertEquals(1, conditionals.getNoneMatch().size());
LocalDateTime dateTime = LocalDateTime.now();
conditionals = conditionals.ifModifiedSince(dateTime);
assertTrue(conditionals.getModifiedSince().isPresent());
}
@Test
public void testIfUnModifiedSinceAndIfMatch() {
Conditionals conditionals = new Conditionals();
conditionals = conditionals.addIfMatch(Tag.parse("\"bar\"").get());
assertEquals(1, conditionals.getMatch().size());
LocalDateTime dateTime = LocalDateTime.now();
conditionals = conditionals.ifUnModifiedSince(dateTime);
assertTrue(conditionals.getUnModifiedSince().isPresent());
}
@Test
public void testIfUnModifiedSinceAndIfNoneMatch() {
    // If-Unmodified-Since may not be combined with If-None-Match.
    Conditionals withNoneMatch = new Conditionals().addIfNoneMatch(Tag.parse("\"bar\"").get());
    assertEquals(1, withNoneMatch.getNoneMatch().size());
    LocalDateTime timestamp = LocalDateTime.now();
    try {
        withNoneMatch.ifUnModifiedSince(timestamp);
        fail("Expected IllegalArgumentException");
    }
    catch (IllegalArgumentException expected) {
        // expected: the combination is rejected
    }
    assertFalse(withNoneMatch.getUnModifiedSince().isPresent());
}
@Test
public void testIfModifiedSinceAndIfUnmodifiedSince() {
    // If-Modified-Since and If-Unmodified-Since are mutually exclusive.
    Conditionals conditionals = new Conditionals();
    LocalDateTime dateTime = LocalDateTime.now();
    conditionals = conditionals.ifUnModifiedSince(dateTime);
    // Fixed: getUnModifiedSince() returns an Optional, which is never null, so the previous
    // assertNotNull could never fail. Assert presence instead, consistent with sibling tests.
    assertTrue(conditionals.getUnModifiedSince().isPresent());
    try {
        conditionals.ifModifiedSince(dateTime);
        fail("Expected IllegalArgumentException");
    }
    catch (IllegalArgumentException expected) {
        // expected: the combination is rejected
    }
}
@Test
public void testImmutability() {
    // ifUnModifiedSince must return a new instance and leave the original untouched.
    Conditionals original = new Conditionals();
    Conditionals modified = original.ifUnModifiedSince(LocalDateTime.now());
    assertNotSame(original, modified);
    // The original stays completely empty.
    assertEquals(0, original.getNoneMatch().size());
    assertEquals(0, original.getMatch().size());
    assertFalse(original.getModifiedSince().isPresent());
    assertFalse(original.getUnModifiedSince().isPresent());
    // The copy carries only the new If-Unmodified-Since value.
    assertEquals(0, modified.getNoneMatch().size());
    assertEquals(0, modified.getMatch().size());
    assertFalse(modified.getModifiedSince().isPresent());
    assertTrue(modified.getUnModifiedSince().isPresent());
}
}
| |
package liquibase.statementexecute;
import liquibase.database.*;
import liquibase.database.core.*;
import liquibase.database.typeconversion.TypeConverterFactory;
import liquibase.test.DatabaseTestContext;
import liquibase.statement.*;
import liquibase.statement.core.AddColumnStatement;
import liquibase.statement.core.CreateTableStatement;
import liquibase.statement.core.AddAutoIncrementStatement;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
/**
 * Verifies the SQL generated for auto-increment and add-column statements across the
 * supported database dialects.
 *
 * <p>Removed roughly 165 lines of commented-out legacy tests that had drifted from the
 * current API; they are preserved in version control history if ever needed.
 */
public class AddAutoIncrementExecuteTest extends AbstractExecuteTest {

    protected static final String TABLE_NAME = "table_name";
    protected static final String COLUMN_NAME = "column_name";

    /**
     * Creates the table each test runs against, plus a copy in the alternate schema
     * where the database supports schemas.
     */
    @Override
    protected List<? extends SqlStatement> setupStatements(Database database) {
        ArrayList<CreateTableStatement> statements = new ArrayList<CreateTableStatement>();
        CreateTableStatement table = new CreateTableStatement(null, TABLE_NAME);
        if (database instanceof MySQLDatabase) {
            // MySQL requires an auto-increment column to be part of a key.
            table.addPrimaryKeyColumn("id", TypeConverterFactory.getInstance().findTypeConverter(database).getDataType("int", false), null, "pk_", null);
        } else {
            table.addColumn("id", TypeConverterFactory.getInstance().findTypeConverter(database).getDataType("int", false), null, new NotNullConstraint());
        }
        statements.add(table);
        if (database.supportsSchemas()) {
            table = new CreateTableStatement(DatabaseTestContext.ALT_SCHEMA, TABLE_NAME);
            table.addColumn("id", TypeConverterFactory.getInstance().findTypeConverter(database).getDataType("int", false), null, new NotNullConstraint());
            statements.add(table);
        }
        return statements;
    }

    /** Auto-increment applied to an existing column with no schema qualifier. */
    @SuppressWarnings("unchecked")
    @Test
    public void noSchema() throws Exception {
        this.statementUnderTest = new AddAutoIncrementStatement(null, TABLE_NAME, COLUMN_NAME, "int", null, null);
        assertCorrect("alter table [table_name] modify column_name serial auto_increment", PostgresDatabase.class);
        assertCorrect("alter table `table_name` modify `column_name` int auto_increment", MySQLDatabase.class);
        assertCorrect("ALTER TABLE [table_name] ALTER COLUMN [column_name] SET GENERATED ALWAYS AS IDENTITY", DB2Database.class);
        assertCorrect("alter table table_name alter column column_name int generated by default as identity identity", HsqlDatabase.class, H2Database.class);
        assertCorrect("ALTER TABLE [table_name] MODIFY [column_name] serial", InformixDatabase.class);
        assertCorrectOnRest("ALTER TABLE [table_name] MODIFY [column_name] int AUTO_INCREMENT");
    }

    /** Add a column with a default value and no constraints. */
    @SuppressWarnings("unchecked")
    @Test
    public void fullNoConstraints() throws Exception {
        this.statementUnderTest = new AddColumnStatement(null, "table_name", TABLE_NAME, COLUMN_NAME, 42);
        assertCorrect("alter table [table_name] add [table_name] column_name default 42 null", SybaseDatabase.class);
        assertCorrect("alter table [dbo].[table_name] add [table_name] column_name constraint df_table_name_table_name default 42", MSSQLDatabase.class);
        // assertCorrect("alter table [table_name] add [column_name] integer default 42", SQLiteDatabase.class);
        assertCorrect("not supported. fixme!!", SQLiteDatabase.class);
        assertCorrect("alter table table_name add table_name column_name default 42", PostgresDatabase.class, InformixDatabase.class, OracleDatabase.class, DerbyDatabase.class, HsqlDatabase.class, DB2Database.class, H2Database.class, CacheDatabase.class, FirebirdDatabase.class, MaxDBDatabase.class);
        assertCorrect("alter table [table_name] add [table_name] column_name default 42 null", SybaseASADatabase.class);
        assertCorrect("alter table `table_name` add `table_name` column_name default 42", MySQLDatabase.class);
        assertCorrectOnRest("ALTER TABLE [table_name] ADD [column_name] int DEFAULT 42");
    }

    /** Add a column carrying an auto-increment constraint. */
    @SuppressWarnings("unchecked")
    @Test
    public void autoIncrement() throws Exception {
        this.statementUnderTest = new AddColumnStatement(null, TABLE_NAME, COLUMN_NAME, "int", null, new AutoIncrementConstraint());
        assertCorrect("ALTER TABLE [dbo].[table_name] ADD [column_name] int auto_increment_clause", MSSQLDatabase.class);
        assertCorrect("alter table [table_name] add [column_name] int default autoincrement null", SybaseASADatabase.class);
        assertCorrect("alter table [table_name] add [column_name] int identity null", SybaseDatabase.class);
        assertCorrect("alter table [table_name] add [column_name] serial", PostgresDatabase.class, InformixDatabase.class);
        assertCorrect("not supported. fixme!!", SQLiteDatabase.class);
        assertCorrectOnRest("ALTER TABLE [table_name] ADD [column_name] int auto_increment_clause");
    }

    /** Add a not-null column with a default value. */
    @SuppressWarnings("unchecked")
    @Test
    public void notNull() throws Exception {
        this.statementUnderTest = new AddColumnStatement(null, TABLE_NAME, COLUMN_NAME, "int", 42, new NotNullConstraint());
        assertCorrect("ALTER TABLE [table_name] ADD [column_name] int DEFAULT 42 NOT NULL", SybaseASADatabase.class, SybaseDatabase.class);
        assertCorrect("alter table table_name add column_name int default 42 not null", InformixDatabase.class);
        assertCorrect("alter table [dbo].[table_name] add [column_name] int not null constraint df_table_name_column_name default 42", MSSQLDatabase.class);
        assertCorrect("alter table table_name add column_name int default 42 not null", OracleDatabase.class, DerbyDatabase.class, HsqlDatabase.class, DB2Database.class, H2Database.class, FirebirdDatabase.class);
        assertCorrect("not supported. fixme!!", SQLiteDatabase.class);
        assertCorrectOnRest("ALTER TABLE [table_name] ADD [column_name] int NOT NULL DEFAULT 42");
    }

    /** Add a primary-key column. */
    @SuppressWarnings("unchecked")
    @Test
    public void generateSql_primaryKey() throws Exception {
        this.statementUnderTest = new AddColumnStatement(null, "table_name", "column_name", "int", null, new PrimaryKeyConstraint());
        // TODO sqlserver (at least 2000) does not allows add not null column.
        // this type or refactoring should include adding nullable column, updating it to some default value, and final adding primary constraint.
        // assertCorrect(null, MSSQLDatabase.class);
        assertCorrect("ALTER TABLE [table_name] ADD [column_name] int primary key not null", InformixDatabase.class, OracleDatabase.class, FirebirdDatabase.class, SybaseASADatabase.class, SybaseDatabase.class);
        assertCorrect("ALTER TABLE [dbo].[table_name] ADD [column_name] int NOT NULL PRIMARY KEY", MSSQLDatabase.class);
        assertCorrectOnRest("ALTER TABLE [table_name] ADD [column_name] int NOT NULL PRIMARY KEY");
    }
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mturk.model;
import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListReviewPolicyResultsForHIT"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
/**
 * Result of a {@code ListReviewPolicyResultsForHIT} request: the Assignment-level and
 * HIT-level review policies and reports for a HIT, plus a pagination token.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mturk-requester-2017-01-17/ListReviewPolicyResultsForHIT"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListReviewPolicyResultsForHITResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable,
        Cloneable {

    /** The HITId of the HIT for which results have been returned. */
    private String hITId;
    /** The Assignment-level Review Policy; contains only the PolicyName element. */
    private ReviewPolicy assignmentReviewPolicy;
    /** The HIT-level Review Policy; contains only the PolicyName element. */
    private ReviewPolicy hITReviewPolicy;
    /** ReviewResult and ReviewAction elements for an Assignment. */
    private ReviewReport assignmentReviewReport;
    /** ReviewResult and ReviewAction elements for a particular HIT. */
    private ReviewReport hITReviewReport;
    /** Pagination token; present when more results are available. */
    private String nextToken;

    /** @param hITId the HITId of the HIT for which results have been returned */
    public void setHITId(String hITId) {
        this.hITId = hITId;
    }

    /** @return the HITId of the HIT for which results have been returned */
    public String getHITId() {
        return this.hITId;
    }

    /**
     * @param hITId the HITId of the HIT for which results have been returned
     * @return this object, so that method calls can be chained together
     */
    public ListReviewPolicyResultsForHITResult withHITId(String hITId) {
        setHITId(hITId);
        return this;
    }

    /** @param assignmentReviewPolicy the Assignment-level Review Policy (PolicyName element only) */
    public void setAssignmentReviewPolicy(ReviewPolicy assignmentReviewPolicy) {
        this.assignmentReviewPolicy = assignmentReviewPolicy;
    }

    /** @return the Assignment-level Review Policy (PolicyName element only) */
    public ReviewPolicy getAssignmentReviewPolicy() {
        return this.assignmentReviewPolicy;
    }

    /**
     * @param assignmentReviewPolicy the Assignment-level Review Policy (PolicyName element only)
     * @return this object, so that method calls can be chained together
     */
    public ListReviewPolicyResultsForHITResult withAssignmentReviewPolicy(ReviewPolicy assignmentReviewPolicy) {
        setAssignmentReviewPolicy(assignmentReviewPolicy);
        return this;
    }

    /** @param hITReviewPolicy the HIT-level Review Policy (PolicyName element only) */
    public void setHITReviewPolicy(ReviewPolicy hITReviewPolicy) {
        this.hITReviewPolicy = hITReviewPolicy;
    }

    /** @return the HIT-level Review Policy (PolicyName element only) */
    public ReviewPolicy getHITReviewPolicy() {
        return this.hITReviewPolicy;
    }

    /**
     * @param hITReviewPolicy the HIT-level Review Policy (PolicyName element only)
     * @return this object, so that method calls can be chained together
     */
    public ListReviewPolicyResultsForHITResult withHITReviewPolicy(ReviewPolicy hITReviewPolicy) {
        setHITReviewPolicy(hITReviewPolicy);
        return this;
    }

    /** @param assignmentReviewReport the ReviewResult and ReviewAction elements for an Assignment */
    public void setAssignmentReviewReport(ReviewReport assignmentReviewReport) {
        this.assignmentReviewReport = assignmentReviewReport;
    }

    /** @return the ReviewResult and ReviewAction elements for an Assignment */
    public ReviewReport getAssignmentReviewReport() {
        return this.assignmentReviewReport;
    }

    /**
     * @param assignmentReviewReport the ReviewResult and ReviewAction elements for an Assignment
     * @return this object, so that method calls can be chained together
     */
    public ListReviewPolicyResultsForHITResult withAssignmentReviewReport(ReviewReport assignmentReviewReport) {
        setAssignmentReviewReport(assignmentReviewReport);
        return this;
    }

    /** @param hITReviewReport the ReviewResult and ReviewAction elements for a particular HIT */
    public void setHITReviewReport(ReviewReport hITReviewReport) {
        this.hITReviewReport = hITReviewReport;
    }

    /** @return the ReviewResult and ReviewAction elements for a particular HIT */
    public ReviewReport getHITReviewReport() {
        return this.hITReviewReport;
    }

    /**
     * @param hITReviewReport the ReviewResult and ReviewAction elements for a particular HIT
     * @return this object, so that method calls can be chained together
     */
    public ListReviewPolicyResultsForHITResult withHITReviewReport(ReviewReport hITReviewReport) {
        setHITReviewReport(hITReviewReport);
        return this;
    }

    /** @param nextToken pagination token for fetching the next page of results */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /** @return the pagination token for fetching the next page of results, or null when exhausted */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * @param nextToken pagination token for fetching the next page of results
     * @return this object, so that method calls can be chained together
     */
    public ListReviewPolicyResultsForHITResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getHITId() != null)
            sb.append("HITId: ").append(getHITId()).append(",");
        if (getAssignmentReviewPolicy() != null)
            sb.append("AssignmentReviewPolicy: ").append(getAssignmentReviewPolicy()).append(",");
        if (getHITReviewPolicy() != null)
            sb.append("HITReviewPolicy: ").append(getHITReviewPolicy()).append(",");
        if (getAssignmentReviewReport() != null)
            sb.append("AssignmentReviewReport: ").append(getAssignmentReviewReport()).append(",");
        if (getHITReviewReport() != null)
            sb.append("HITReviewReport: ").append(getHITReviewReport()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof ListReviewPolicyResultsForHITResult))
            return false;
        ListReviewPolicyResultsForHITResult other = (ListReviewPolicyResultsForHITResult) obj;
        // Objects.equals collapses each generated null-xor/equals pair without changing semantics.
        return Objects.equals(getHITId(), other.getHITId())
                && Objects.equals(getAssignmentReviewPolicy(), other.getAssignmentReviewPolicy())
                && Objects.equals(getHITReviewPolicy(), other.getHITReviewPolicy())
                && Objects.equals(getAssignmentReviewReport(), other.getAssignmentReviewReport())
                && Objects.equals(getHITReviewReport(), other.getHITReviewReport())
                && Objects.equals(getNextToken(), other.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0) as the
        // generated prime loop, so the produced values are unchanged.
        return Objects.hash(getHITId(), getAssignmentReviewPolicy(), getHITReviewPolicy(),
                getAssignmentReviewReport(), getHITReviewReport(), getNextToken());
    }

    @Override
    public ListReviewPolicyResultsForHITResult clone() {
        try {
            return (ListReviewPolicyResultsForHITResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/* Copyright (C) 2013-2015 Computer Sciences Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
package ezbake.intents.common;
import com.cloudera.impala.extdatasource.thrift.TBinaryPredicate;
import com.cloudera.impala.extdatasource.thrift.TBinaryPredicateList;
import com.cloudera.impala.extdatasource.thrift.TColumnDesc;
import com.cloudera.impala.extdatasource.thrift.TTableSchema;
import ezbake.base.thrift.EzSecurityToken;
import ezbake.configuration.EzConfiguration;
import ezbake.configuration.constants.EzBakePropertyConstants;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import java.util.List;
import java.util.Map;
import java.util.UUID;
public class RedisUtils {
private static Logger appLog = LoggerFactory.getLogger(RedisUtils.class);
private static JedisPool jedisPool = null;
private TDeserializer deserializer;
/**
* Constructor
*
* @param configuration - EZConfiguration object which has Redis configuration
*/
public RedisUtils(EzConfiguration configuration) {
// Read Redis connection settings from the EzBake configuration.
String redisHost = configuration.getProperties().getProperty(EzBakePropertyConstants.REDIS_HOST);
// NOTE(review): parseInt throws NumberFormatException if REDIS_PORT is missing or
// non-numeric — there is no fallback here; confirm configuration is always validated upstream.
int redisPort = Integer.parseInt(configuration.getProperties().getProperty(EzBakePropertyConstants.REDIS_PORT));
appLog.info("Redis Host ({}) Redis Port({})", redisHost, redisPort);
// NOTE(review): this assigns the *static* pool from an instance constructor; constructing a
// second RedisUtils silently replaces the shared pool without closing the old one — confirm intended.
jedisPool = new JedisPool(redisHost, redisPort);
deserializer = new TDeserializer();
}
/**
* put
* <p/>
* Store into Redis the value byte array using the key byte array
*
* @param key - byte array representing key
* @param value - byte array representing value
*/
/**
 * Stores the value byte array in Redis under the key byte array.
 *
 * @param key   byte array representing the key
 * @param value byte array representing the value
 */
public void put(byte[] key, byte[] value) {
    Jedis jedis = jedisPool.getResource();
    try {
        jedis.set(key, value);
    } finally {
        // Fixed: return the connection to the pool even when SET throws;
        // previously an exception leaked the pooled connection.
        jedis.close();
    }
}
/**
* put
* <p/>
* Store into Redis the String value using the String key
*
* @param key - String representing key
* @param value - String representing value
*/
/**
 * Stores the String value in Redis under the String key.
 *
 * <p>Fixed: dropped the needless {@code throws Exception} — the delegate declares no
 * checked exceptions, and removing the clause is source-compatible for callers.
 *
 * @param key   String representing the key
 * @param value String representing the value
 */
public void put(String key, String value) {
    // NOTE(review): getBytes() uses the platform default charset, matching get(String);
    // consider StandardCharsets.UTF_8 on both sides in a coordinated change.
    put(key.getBytes(), value.getBytes());
}
/**
* get
* <p/>
* Retrieve from Redis the value associated with the byte array representing the key
*
* @param key - byte array representing key
* @return byte array representing value, null if key doesn't exist
*/
/**
 * Retrieves from Redis the value associated with the byte array key.
 *
 * @param key byte array representing the key
 * @return byte array representing the value, or null if the key doesn't exist
 */
public byte[] get(byte[] key) {
    Jedis jedis = jedisPool.getResource();
    try {
        return jedis.get(key);
    } finally {
        // Fixed: return the connection to the pool even when GET throws;
        // previously an exception leaked the pooled connection.
        jedis.close();
    }
}
/**
* get
* <p/>
* Retrieve from Redis the String value associated with the String key
*
* @param key - String representing key
* @return String representing value, null if key doesn't exist
*/
/**
 * Retrieves from Redis the String value associated with the String key.
 *
 * @param key String representing the key
 * @return String representing the value, or null if the key doesn't exist
 */
public String get(String key) {
    byte[] raw = get(key.getBytes());
    return (raw == null) ? null : new String(raw);
}
/**
* retrieveSecurityToken
* <p/>
* Retrieve the EzSecurityToken from Redis stored under the secUuid key
*
* @param secUuid - String key value EzSecurityToken is stored under
* @return EzSecurityToken or null if not found
*/
/**
 * Retrieves the EzSecurityToken stored in Redis under the given key.
 *
 * @param secUuid key under which the EzSecurityToken is stored
 * @return the deserialized token, or null if not found
 * @throws TException if Thrift deserialization fails
 */
public EzSecurityToken retrieveSecurityToken(String secUuid) throws TException {
    Jedis jedis = jedisPool.getResource();
    try {
        byte[] raw = jedis.get(secUuid.getBytes());
        if (raw == null) {
            return null;
        }
        EzSecurityToken token = new EzSecurityToken();
        deserializer.deserialize(token, raw);
        return token;
    } finally {
        jedis.close();
    }
}
/**
 * Deletes the security token stored under the given key.
 *
 * @param secUuid key under which the token is stored
 * @return false only when the key existed but DEL removed nothing; true otherwise
 *         (including when the key was already absent, preserving the original contract)
 */
public boolean deleteSecurityToken(String secUuid) {
    Jedis jedis = jedisPool.getResource();
    try {
        if (!jedis.exists(secUuid)) {
            // Original behavior: a missing key is reported as success.
            return true;
        }
        // Fixed: DEL returns the number of keys removed and is never negative, so the
        // old "count < 0" check could never report a failure. A zero count (key vanished
        // between EXISTS and DEL) now correctly yields false.
        return jedis.del(secUuid) > 0;
    } finally {
        // Fixed: return the connection to the pool even on exception.
        jedis.close();
    }
}
/**
* checkTableSchemaForSecUuid
* <p/>
* check the TTableSchema object to see if the secUUID column is present
*
* @param tableschema - TTableSchema object sent by Impala
* @return True if present, else false
*/
/**
 * Checks whether the schema Impala sent contains the secUUID column.
 *
 * @param tableschema TTableSchema object sent by Impala
 * @return true if a column named secUUID (case-insensitive) is present
 */
private boolean checkTableSchemaForSecUuid(TTableSchema tableschema) {
    for (TColumnDesc column : tableschema.getCols()) {
        if ("secUUID".equalsIgnoreCase(column.getName())) {
            return true;
        }
    }
    return false;
}
/**
* openImpalaAppToken
* <p/>
* Create an entry into Redis to hold pertinent info about an Impala call that an app will need to service
*
* @param tableName - The name of the table Impala is querying
* @param batchSize - Number of results each Impala call is expecting per result set
* @param tableSchema - TTableSchema object representing the requested columns from Impala
* @param predicates - The predicates to filter search results
* @return ImpalaAppToken
*/
/**
 * Creates an Impala scan token, persists its state in Redis under a fresh scan handle,
 * and returns the in-memory token for the caller.
 *
 * @param tableName   the name of the table Impala is querying
 * @param batchSize   number of results each Impala call expects per result set
 * @param tableSchema the requested columns from Impala
 * @param predicates  the predicates to filter search results
 * @param secUuid     key of the one-shot security token previously stored in Redis
 * @return the populated ImpalaAppToken
 * @throws TException if Thrift (de)serialization fails
 */
public ImpalaAppToken openImpalaAppToken(String tableName, int batchSize, TTableSchema tableSchema, List<List<TBinaryPredicate>> predicates, String secUuid)
        throws TException {
    appLog.info("Opening an Impala App Token");
    ImpalaAppToken appToken = new ImpalaAppToken();
    String scanHandle = UUID.randomUUID().toString();
    // If secUUID appears in the requested schema we must later echo a NULL-valued
    // ColumnData for it so the result shape matches what Impala expects.
    boolean secUuidColumnDataInResultSet = checkTableSchemaForSecUuid(tableSchema);
    // The security token is single-use: read it, then remove it from Redis.
    EzSecurityToken securityToken = retrieveSecurityToken(secUuid);
    deleteSecurityToken(secUuid);
    appToken.setScanHandle(scanHandle);
    appToken.setTableName(tableName);
    appToken.setTableSchema(tableSchema);
    appToken.setOffset(0);
    appToken.setBatchsize(batchSize);
    appToken.setSecUuidInResultSet(secUuidColumnDataInResultSet);
    appToken.setPredicates(predicates);
    appToken.setUserToken(securityToken);
    Jedis jedis = jedisPool.getResource();
    try {
        jedis.hset(scanHandle, Constants.TABLE_NAME_STR, tableName);
        jedis.hset(scanHandle, Constants.OFFSET_STR, Integer.toString(appToken.getOffset()));
        // Fixed: replaced the deprecated boxing constructor new Integer(batchSize).toString()
        // with the static Integer.toString(batchSize).
        jedis.hset(scanHandle, Constants.BATCHSIZE_STR, Integer.toString(batchSize));
        jedis.hset(scanHandle, Constants.CLOSED_STR, Boolean.FALSE.toString());
        jedis.hset(scanHandle, Constants.SECUUID_IN_RESULT_SET_STRING, Boolean.toString(secUuidColumnDataInResultSet));
        TSerializer serializer = new TSerializer();
        byte[] tableSchemaBytes = serializer.serialize(tableSchema);
        jedis.hset(scanHandle.getBytes(), Constants.TABLESCHEMA_STR.getBytes(), tableSchemaBytes);
        byte[] securityTokenBytes = serializer.serialize(securityToken);
        jedis.hset(scanHandle.getBytes(), Constants.USER_AUTHS_STR.getBytes(), securityTokenBytes);
        // Thrift cannot serialize a bare List<List<...>>, so wrap the predicates first.
        TBinaryPredicateList wrapper = new TBinaryPredicateList();
        wrapper.setPredicates(predicates);
        byte[] predicateBytes = serializer.serialize(wrapper);
        jedis.hset(scanHandle.getBytes(), Constants.PREDICATES_STR.getBytes(), predicateBytes);
    } finally {
        jedis.close();
    }
    return appToken;
}
/**
 * Advances the stored scan offset for the given scan handle by {@code value}
 * (typically the page/batch size). No-op if the handle is unknown to Redis.
 *
 * @param scanHandle Redis key of the scan whose offset should advance
 * @param value      amount to add to the stored offset
 */
public void incrementImpalaAppTokenOffSet(String scanHandle, long value) {
    Jedis jedis = jedisPool.getResource();
    // close() is in a finally block so the pooled connection is returned even
    // if exists()/hincrBy() throws; the original leaked it on failure.
    try {
        if (jedis.exists(scanHandle)) {
            appLog.info("increment offset by " + value);
            jedis.hincrBy(scanHandle, Constants.OFFSET_STR, value);
        }
    } finally {
        jedis.close();
    }
}
/**
 * closeImpalaAppToken
 * <p/>
 * Deletes the stored app token state from Redis.
 *
 * @param scanhandle - used as the key in Redis where the app token is stored
 * @return true if token was successfully closed, false if the handle was unknown
 */
public boolean closeImpalaAppToken(String scanhandle) {
    appLog.info("Closing the ImpalaAppToken");
    boolean result = true;
    Jedis jedis = jedisPool.getResource();
    // close() is in a finally block so the pooled connection is returned even
    // if exists()/del() throws; the original leaked it on failure.
    try {
        if (jedis.exists(scanhandle)) {
            jedis.del(scanhandle.getBytes());
        } else {
            result = false;
        }
    } finally {
        jedis.close();
    }
    return result;
}
/**
 * getImpalaAppToken
 * <p/>
 * Retrieve the app token from Redis using the scanHandle key.
 *
 * @param scanHandle - key into Redis where token is stored
 * @return ImpalaAppToken rebuilt from the stored hash fields
 * @throws TException if Thrift deserialization of a stored field fails
 */
public ImpalaAppToken getImpalaAppToken(String scanHandle) throws TException {
    appLog.info("getImpalaAppToken using scanHandle: {}", scanHandle);
    ImpalaAppToken appToken = new ImpalaAppToken();
    appToken.setScanHandle(scanHandle);
    Jedis jedis = jedisPool.getResource();
    try {
        // hgetAll moved inside the try: the original called it before the
        // try/finally and leaked the pooled connection if it threw.
        Map<byte[], byte[]> m = jedis.hgetAll(scanHandle.getBytes());
        // NOTE(review): if the handle is unknown, m.get(...) returns null and the
        // converters below throw NPE -- callers appear to rely on the handle
        // existing; confirm before hardening.
        String tablename = new String(m.get(Constants.TABLE_NAME_STR.getBytes()));
        appToken.setTableName(tablename);
        appToken.setClosed(convertBytesToBoolean(m.get(Constants.CLOSED_STR.getBytes())));
        appToken.setBatchsize(convertBytesToInteger(m.get(Constants.BATCHSIZE_STR.getBytes())));
        appToken.setOffset(convertBytesToInteger(m.get(Constants.OFFSET_STR.getBytes())));
        appToken.setPredicates(convertBytesToPredicates(m.get(Constants.PREDICATES_STR.getBytes())));
        appToken.setTableSchema(convertBytesToTableSchema(m.get(Constants.TABLESCHEMA_STR.getBytes())));
        appToken.setUserToken(convertBytesToEzSecurityToken(m.get(Constants.USER_AUTHS_STR.getBytes())));
        appToken.setSecUuidInResultSet(convertBytesToBoolean(m.get(Constants.SECUUID_IN_RESULT_SET_STRING.getBytes())));
    } finally {
        jedis.close();
    }
    return appToken;
}
/**
 * Helper method to decode a byte array (platform-default charset, matching how
 * the values were written) into a Boolean.
 *
 * @param bytes encoded text such as "true" or "false"
 * @return Boolean parsed from the decoded text
 */
private Boolean convertBytesToBoolean(byte[] bytes) {
    String text = new String(bytes);
    return Boolean.valueOf(text);
}
/**
 * Helper method to decode a byte array (platform-default charset, matching how
 * the values were written) into an Integer.
 *
 * @param bytes encoded decimal text
 * @return Integer parsed from the decoded text
 */
private Integer convertBytesToInteger(byte[] bytes) {
    String text = new String(bytes);
    return Integer.valueOf(text);
}
/**
 * Helper method to deserialize a byte array back into the predicate lists that
 * were stored wrapped in a {@code TBinaryPredicateList}.
 *
 * @param bytes Thrift-serialized TBinaryPredicateList
 * @return the wrapped List&lt;List&lt;TBinaryPredicate&gt;&gt;
 * @throws TException if Thrift deserialization fails
 */
private List<List<TBinaryPredicate>> convertBytesToPredicates(byte[] bytes) throws TException {
    TBinaryPredicateList wrapper = new TBinaryPredicateList();
    deserializer.deserialize(wrapper, bytes);
    return wrapper.getPredicates();
}
/**
 * Helper method to deserialize a byte array back into a {@code TTableSchema}.
 *
 * @param bytes Thrift-serialized TTableSchema
 * @return the deserialized TTableSchema
 * @throws TException if Thrift deserialization fails
 */
private TTableSchema convertBytesToTableSchema(byte[] bytes) throws TException {
    TTableSchema schema = new TTableSchema();
    deserializer.deserialize(schema, bytes);
    return schema;
}
/**
 * Helper method to deserialize a byte array back into an {@code EzSecurityToken}.
 *
 * @param bytes Thrift-serialized EzSecurityToken
 * @return the deserialized EzSecurityToken
 * @throws TException if Thrift deserialization fails
 */
private EzSecurityToken convertBytesToEzSecurityToken(byte[] bytes) throws TException {
    EzSecurityToken securityToken = new EzSecurityToken();
    deserializer.deserialize(securityToken, bytes);
    return securityToken;
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.features.lua;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeThat;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.core.config.BuckConfig;
import com.facebook.buck.core.config.FakeBuckConfig;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.rules.resolver.impl.TestActionGraphBuilder;
import com.facebook.buck.core.sourcepath.resolver.impl.DefaultSourcePathResolver;
import com.facebook.buck.cxx.toolchain.CxxBuckConfig;
import com.facebook.buck.cxx.toolchain.CxxPlatform;
import com.facebook.buck.cxx.toolchain.CxxPlatformUtils;
import com.facebook.buck.cxx.toolchain.DefaultCxxPlatforms;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkStrategy;
import com.facebook.buck.io.ExecutableFinder;
import com.facebook.buck.io.FakeExecutableFinder;
import com.facebook.buck.io.filesystem.TestProjectFilesystems;
import com.facebook.buck.testutil.ParameterizedTests;
import com.facebook.buck.testutil.ProcessResult;
import com.facebook.buck.testutil.TemporaryPaths;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.Console;
import com.facebook.buck.util.DefaultProcessExecutor;
import com.facebook.buck.util.ExitCode;
import com.facebook.buck.util.ProcessExecutor;
import com.facebook.buck.util.ProcessExecutorParams;
import com.facebook.buck.util.config.Config;
import com.facebook.buck.util.config.Configs;
import com.facebook.buck.util.environment.Platform;
import com.facebook.buck.util.json.ObjectMappers;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collection;
import java.util.Optional;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
/**
 * Integration tests for {@code lua_binary} rules, parameterized over every
 * combination of starter type and native link strategy.
 *
 * <p>All tests are skipped on Windows and when no system {@code lua} interpreter
 * is found; native-starter tests additionally require the Lua devel headers.
 */
@RunWith(Parameterized.class)
public class LuaBinaryIntegrationTest {

  private ProjectWorkspace workspace;
  private Path lua;
  private boolean luaDevel;

  @Parameterized.Parameters(name = "{0} {1}")
  public static Collection<Object[]> data() {
    // Cross product of every starter type with every native link strategy.
    return ParameterizedTests.getPermutations(
        Arrays.asList(LuaBinaryDescription.StarterType.values()),
        Arrays.asList(NativeLinkStrategy.values()));
  }

  @Parameterized.Parameter public LuaBinaryDescription.StarterType starterType;

  @Parameterized.Parameter(value = 1)
  public NativeLinkStrategy nativeLinkStrategy;

  @Rule public TemporaryPaths tmp = new TemporaryPaths();

  @Before
  public void setUp() throws Exception {
    // We don't currently support windows.
    assumeThat(Platform.detect(), Matchers.not(Platform.WINDOWS));
    // Verify that a Lua interpreter is available on the system.
    ExecutableFinder finder = new ExecutableFinder();
    Optional<Path> luaOptional =
        finder.getOptionalExecutable(Paths.get("lua"), ImmutableMap.copyOf(System.getenv()));
    assumeTrue(luaOptional.isPresent());
    lua = luaOptional.get();
    // Try to detect if a Lua devel package is available, which is needed for C/C++ support:
    // run the C compiler on an empty input that force-includes lua.h.
    BuildRuleResolver resolver = new TestActionGraphBuilder();
    CxxPlatform cxxPlatform =
        DefaultCxxPlatforms.build(
            Platform.detect(), new CxxBuckConfig(FakeBuckConfig.builder().build()));
    ProcessExecutorParams params =
        ProcessExecutorParams.builder()
            .setCommand(
                ImmutableList.<String>builder()
                    .addAll(
                        cxxPlatform
                            .getCc()
                            .resolve(resolver)
                            .getCommandPrefix(
                                DefaultSourcePathResolver.from(new SourcePathRuleFinder(resolver))))
                    .add("-includelua.h", "-E", "-")
                    .build())
            .setRedirectInput(ProcessBuilder.Redirect.PIPE)
            .build();
    ProcessExecutor executor = new DefaultProcessExecutor(Console.createNullConsole());
    ProcessExecutor.LaunchedProcess launchedProcess = executor.launchProcess(params);
    launchedProcess.getOutputStream().close();
    int exitCode = executor.waitForLaunchedProcess(launchedProcess).getExitCode();
    luaDevel = exitCode == 0;
    if (starterType == LuaBinaryDescription.StarterType.NATIVE) {
      assumeTrue("Lua devel package required for native starter", luaDevel);
    }
    // Setup the workspace.
    workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "lua_binary", tmp);
    workspace.setUp();
    workspace.writeContentsToPath(
        Joiner.on(System.lineSeparator())
            .join(
                ImmutableList.of(
                    "[lua]",
                    " starter_type = " + starterType.toString().toLowerCase(),
                    " native_link_strategy = " + nativeLinkStrategy.toString().toLowerCase())),
        ".buckconfig");
    // Sanity-check that the written .buckconfig actually took effect.
    LuaPlatform platform =
        getLuaBuckConfig()
            .getPlatforms(
                ImmutableList.of(
                    CxxPlatformUtils.DEFAULT_PLATFORM.withFlavor(DefaultCxxPlatforms.FLAVOR)))
            .get(0);
    assertThat(platform.getStarterType(), Matchers.equalTo(Optional.of(starterType)));
    assertThat(platform.getNativeLinkStrategy(), Matchers.equalTo(nativeLinkStrategy));
  }

  @Test
  public void stdout() throws Exception {
    workspace.writeContentsToPath("require 'os'; io.stdout:write('hello world')", "simple.lua");
    ProcessResult result = workspace.runBuckCommand("run", "//:simple").assertSuccess();
    assertThat(
        result.getStdout() + result.getStderr(),
        result.getStdout().trim(),
        Matchers.equalTo("hello world"));
  }

  @Test
  public void stderr() throws Exception {
    workspace.writeContentsToPath("require 'os'; io.stderr:write('hello world')", "simple.lua");
    Path path = workspace.buildAndReturnOutput("//:simple");
    ProcessExecutor.Result result = workspace.runCommand(path.toString());
    assertThat(
        result.getStdout().orElse("") + result.getStderr().orElse(""),
        result.getStderr().orElse("").trim(),
        Matchers.endsWith("hello world"));
  }

  @Test
  public void errorCode() throws Exception {
    workspace.writeContentsToPath("require 'os'\nos.exit(5)", "simple.lua");
    workspace.runBuckBuild("//:simple").assertSuccess();
    ProcessResult result = workspace.runBuckCommand("run", "//:simple");
    // assertEquals takes (expected, actual); the original had the arguments
    // swapped, which yields misleading failure messages.
    assertEquals(ExitCode.BUILD_ERROR, result.getExitCode());
  }

  @Test
  public void error() throws Exception {
    workspace.writeContentsToPath("blah blah garbage", "simple.lua");
    workspace.runBuckBuild("//:simple").assertSuccess();
    workspace.runBuckCommand("run", "//:simple").assertFailure();
  }

  @Test
  public void args() throws Exception {
    workspace.writeContentsToPath("for i=-1,#arg do print(arg[i]) end", "simple.lua");
    Path arg0 = workspace.buildAndReturnOutput("//:simple");
    // no args...
    ProcessResult result = workspace.runBuckCommand("run", "//:simple").assertSuccess();
    assertThat(
        result.getStdout() + result.getStderr(),
        Splitter.on(System.lineSeparator()).splitToList(result.getStdout().trim()),
        Matchers.contains(
            ImmutableList.of(
                Matchers.anyOf(Matchers.equalTo(lua.toString()), Matchers.equalTo("nil")),
                Matchers.endsWith(arg0.toString()))));
    // with args...
    result = workspace.runBuckCommand("run", "//:simple", "--", "hello", "world").assertSuccess();
    assertThat(
        result.getStdout() + result.getStderr(),
        Splitter.on(System.lineSeparator()).splitToList(result.getStdout().trim()),
        Matchers.contains(
            ImmutableList.of(
                Matchers.anyOf(Matchers.equalTo(lua.toString()), Matchers.equalTo("nil")),
                Matchers.endsWith(arg0.toString()),
                Matchers.equalTo("hello"),
                Matchers.equalTo("world"))));
  }

  @Test
  public void nativeExtension() throws Exception {
    assumeTrue(luaDevel);
    ProcessResult result = workspace.runBuckCommand("run", "//:native").assertSuccess();
    assertThat(
        result.getStdout() + result.getStderr(),
        result.getStdout().trim(),
        Matchers.equalTo("hello world"));
  }

  @Test
  public void nativeExtensionWithDep() throws Exception {
    assumeThat(starterType, Matchers.not(Matchers.equalTo(LuaBinaryDescription.StarterType.PURE)));
    assumeTrue(luaDevel);
    ProcessResult result = workspace.runBuckCommand("run", "//:native_with_dep").assertSuccess();
    assertThat(
        result.getStdout() + result.getStderr(),
        result.getStdout().trim(),
        Matchers.equalTo("hello world"));
  }

  @Test
  public void packagedFormat() throws Exception {
    Path output =
        workspace.buildAndReturnOutput(
            "-c", "lua.package_style=standalone", "-c", "lua.packager=//:packager", "//:simple");
    ImmutableMap<String, ImmutableMap<String, String>> components =
        ObjectMappers.readValue(
            output, new TypeReference<ImmutableMap<String, ImmutableMap<String, String>>>() {});
    assertThat(components.get("modules").keySet(), Matchers.equalTo(ImmutableSet.of("simple.lua")));
  }

  @Test
  @SuppressWarnings("PMD.UseAssertEqualsInsteadOfAssertTrue")
  public void switchingBetweenPackagedFormats() throws Exception {
    // Run an initial build using the standalone packaging style.
    String standaloneFirst =
        workspace.getFileContents(
            workspace.buildAndReturnOutput(
                "-c",
                "lua.package_style=standalone",
                "-c",
                "lua.packager=//:packager",
                "//:simple"));
    // Now rebuild with just changing to an in-place packaging style.
    String inplaceFirst =
        workspace.getFileContents(
            workspace.buildAndReturnOutput("-c", "lua.package_style=inplace", "//:simple"));
    // Now rebuild again, switching back to standalone, and verify the output matches the original
    // build's output.
    String standaloneSecond =
        workspace.getFileContents(
            workspace.buildAndReturnOutput(
                "-c",
                "lua.package_style=standalone",
                "-c",
                "lua.packager=//:packager",
                "//:simple"));
    assertTrue(standaloneFirst.equals(standaloneSecond));
    // Now rebuild again, switching back to in-place, and verify the output matches the original
    // build's output.
    String inplaceSecond =
        workspace.getFileContents(
            workspace.buildAndReturnOutput("-c", "lua.package_style=inplace", "//:simple"));
    assertTrue(inplaceFirst.equals(inplaceSecond));
  }

  @Test
  public void usedInGenruleCommand() throws IOException {
    assumeTrue(luaDevel);
    workspace.writeContentsToPath("require 'os'; io.stdout:write('okay')", "simple.lua");
    Path output = workspace.buildAndReturnOutput("//:genrule");
    assertEquals("okay", workspace.getFileContents(output));
  }

  /** Re-reads the workspace's .buckconfig into a fresh {@link LuaBuckConfig}. */
  private LuaBuckConfig getLuaBuckConfig() throws IOException {
    Config rawConfig = Configs.createDefaultConfig(tmp.getRoot());
    BuckConfig buckConfig =
        FakeBuckConfig.builder()
            .setEnvironment(ImmutableMap.of())
            .setSections(rawConfig.getRawConfig())
            .setFilesystem(TestProjectFilesystems.createProjectFilesystem(tmp.getRoot()))
            .build();
    return new LuaBuckConfig(buckConfig, new FakeExecutableFinder(ImmutableList.of()));
  }
}
| |
package ut.com.atlassian.maven.plugins.jgitflow.manager;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.security.SecureRandom;
import java.util.*;
import com.atlassian.jgitflow.core.JGitFlow;
import com.atlassian.maven.plugins.jgitflow.ReleaseContext;
import com.atlassian.maven.plugins.jgitflow.helper.SessionAndProjects;
import com.atlassian.maven.plugins.jgitflow.manager.FlowReleaseManager;
import com.google.common.base.Strings;
import org.apache.commons.io.IOUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.ArtifactUtils;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.manager.WagonManager;
import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.DefaultArtifactRepository;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.apache.maven.artifact.repository.layout.DefaultRepositoryLayout;
import org.apache.maven.artifact.resolver.ArtifactCollector;
import org.apache.maven.artifact.resolver.ArtifactResolutionResult;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.DependencyManagement;
import org.apache.maven.model.Profile;
import org.apache.maven.model.Repository;
import org.apache.maven.profiles.DefaultProfileManager;
import org.apache.maven.profiles.ProfileManager;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectBuilder;
import org.apache.maven.project.ProjectBuildingException;
import org.apache.maven.project.ProjectSorter;
import org.apache.maven.settings.Settings;
import org.apache.maven.shared.release.util.ReleaseUtil;
import org.codehaus.plexus.PlexusJUnit4TestCase;
import org.codehaus.plexus.context.ContextException;
import org.codehaus.plexus.context.DefaultContext;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable;
import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.StringUtils;
import org.junit.After;
import org.junit.Before;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* @since version
*/
public abstract class AbstractFlowManagerTest extends PlexusJUnit4TestCase
{
protected MavenProjectBuilder projectBuilder;
protected ArtifactRepository localRepository;
protected File testFileBase;
private static final SecureRandom random = new SecureRandom();
public static final String PROJECT_BASEDIR = "";
private static final DefaultContext EMPTY_CONTEXT = new DefaultContext()
{
public Object get( Object key ) throws ContextException
{
return null;
}
};
@Before
public void doSetup() throws Exception
{
projectBuilder = (MavenProjectBuilder) lookup(MavenProjectBuilder.ROLE);
ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) lookup(ArtifactRepositoryLayout.ROLE, "default");
String localRepoPath = getTestFile(PROJECT_BASEDIR + "target/local-repository").getAbsolutePath().replace('\\', '/');
localRepository = new DefaultArtifactRepository("local", "file://" + localRepoPath, layout);
this.testFileBase = newTempDir();
}
@After
public void doTearDown() throws Exception
{
((Contextualizable) projectBuilder).contextualize(EMPTY_CONTEXT);
((Contextualizable) lookup(WagonManager.ROLE)).contextualize(EMPTY_CONTEXT);
// if(null != testFileBase && testFileBase.exists())
// {
// try
// {
// FileUtils.deleteDirectory(testFileBase);
// }
// catch (IOException e)
// {
// //ignore
// }
// }
}
@Override
protected InputStream getCustomConfiguration() throws Exception
{
String configBase = System.getProperty("basedir","");
if(!configBase.endsWith("/"))
{
configBase = configBase + "/";
}
return org.apache.commons.io.FileUtils.openInputStream(new File(configBase + "target/components.xml"));
}
protected SessionAndProjects basicReleaseRewriteTest(String projectName) throws Exception
{
return basicReleaseRewriteTest(projectName,"");
}
protected SessionAndProjects basicReleaseRewriteTest(String projectName, String releaseVersion) throws Exception
{
List<MavenProject> projects = createReactorProjects("rewrite-for-release",projectName);
File projectRoot = projects.get(0).getBasedir();
ReleaseContext ctx = new ReleaseContext(projectRoot);
if(!Strings.isNullOrEmpty(releaseVersion))
{
ctx.setDefaultReleaseVersion(releaseVersion);
}
ctx.setInteractive(false).setNoTag(true);
return basicReleaseRewriteTest(projectName, ctx);
}
protected SessionAndProjects basicReleaseRewriteTest(String projectName, ReleaseContext ctx) throws Exception
{
List<MavenProject> projects = createReactorProjects("rewrite-for-release",projectName);
File projectRoot = ctx.getBaseDir();
JGitFlow flow = JGitFlow.getOrInit(projectRoot);
flow.git().checkout().setName(flow.getDevelopBranchName()).call();
assertOnDevelop(flow);
initialCommitAll(flow);
FlowReleaseManager relman = getReleaseManager();
MavenSession session = new MavenSession(getContainer(),new Settings(),localRepository,null,null,null,projectRoot.getAbsolutePath(),new Properties(),new Properties(), null);
relman.start(ctx,projects,session);
assertOnRelease(flow, ctx.getDefaultReleaseVersion());
compareSnapPomFiles(projects);
assertTrue(flow.git().status().call().isClean());
flow.git().checkout().setName(flow.getDevelopBranchName()).call();
compareDevPomFiles(projects);
return new SessionAndProjects(session,projects);
}
protected void basicHotfixRewriteTest(String projectName) throws Exception
{
basicHotfixRewriteTest(projectName,"");
}
protected void basicHotfixRewriteTest(String projectName, String releaseVersion) throws Exception
{
List<MavenProject> projects = createReactorProjects("rewrite-for-hotfix",projectName);
File projectRoot = projects.get(0).getBasedir();
ReleaseContext ctx = new ReleaseContext(projectRoot);
if(!Strings.isNullOrEmpty(releaseVersion))
{
ctx.setDefaultReleaseVersion(releaseVersion);
}
ctx.setInteractive(false).setNoTag(true);
basicHotfixRewriteTest(projectName, ctx);
}
protected void basicHotfixRewriteTest(String projectName, ReleaseContext ctx) throws Exception
{
List<MavenProject> projects = createReactorProjects("rewrite-for-hotfix",projectName);
File projectRoot = ctx.getBaseDir();
JGitFlow flow = JGitFlow.getOrInit(projectRoot);
flow.git().checkout().setName(flow.getMasterBranchName()).call();
assertOnMaster(flow);
initialCommitAll(flow);
FlowReleaseManager relman = getHotfixManager();
MavenSession session = new MavenSession(getContainer(),new Settings(),localRepository,null,null,null,projectRoot.getAbsolutePath(),new Properties(),new Properties(), null);
ctx.setInteractive(false);
relman.start(ctx,projects,session);
assertOnHotfix(flow);
compareSnapPomFiles(projects);
assertTrue(flow.git().status().call().isClean());
}
protected void initialCommitAll(JGitFlow flow) throws Exception
{
commitAll(flow, "Initial commit");
}
protected void commitAll(JGitFlow flow, String message) throws Exception
{
flow.git().add().addFilepattern(".").call();
flow.git().commit().setMessage(message).call();
}
protected void assertOnDevelop(JGitFlow flow) throws Exception
{
assertEquals(flow.getDevelopBranchName(), flow.git().getRepository().getBranch());
}
protected void assertOnMaster(JGitFlow flow) throws Exception
{
assertEquals(flow.getMasterBranchName(), flow.git().getRepository().getBranch());
}
protected void assertOnRelease(JGitFlow flow, String version) throws Exception
{
if(Strings.isNullOrEmpty(version))
{
assertTrue(flow.git().getRepository().getBranch().startsWith(flow.getReleaseBranchPrefix()));
}
else
{
assertEquals(flow.getReleaseBranchPrefix() + version, flow.git().getRepository().getBranch());
}
}
protected void assertOnFeature(JGitFlow flow, String feature) throws Exception
{
if(Strings.isNullOrEmpty(feature))
{
assertTrue(flow.git().getRepository().getBranch().startsWith(flow.getFeatureBranchPrefix()));
}
else
{
assertEquals(flow.getFeatureBranchPrefix() + feature, flow.git().getRepository().getBranch());
}
}
protected void assertOnHotfix(JGitFlow flow) throws Exception
{
assertTrue(flow.git().getRepository().getBranch().contains(flow.getHotfixBranchPrefix()));
}
protected FlowReleaseManager getReleaseManager() throws Exception
{
return (FlowReleaseManager) lookup(FlowReleaseManager.class.getName(),"release");
}
protected FlowReleaseManager getHotfixManager() throws Exception
{
return (FlowReleaseManager) lookup(FlowReleaseManager.class.getName(),"hotfix");
}
protected FlowReleaseManager getFeatureManager() throws Exception
{
return (FlowReleaseManager) lookup(FlowReleaseManager.class.getName(),"feature");
}
protected String readTestProjectFile(String fileName) throws IOException
{
return ReleaseUtil.readXmlFile(getTestFile(PROJECT_BASEDIR + "target/test-classes/projects/" + fileName));
}
protected void copyTestProject(String path, String subpath) throws IOException
{
File testResourcesDir = getTestFile(PROJECT_BASEDIR + "src/test/resources/");
File resourceDir = null;
File targetDir = null;
if(Strings.isNullOrEmpty(subpath))
{
resourceDir = new File( testResourcesDir, "projects/" + path + "/" );
targetDir = new File( testFileBase, "projects/" + path + "/" );
}
else
{
resourceDir = new File( testResourcesDir, "projects/" + path + "/" + subpath + "/" );
targetDir = new File( testFileBase, "projects/" + path + "/" + subpath + "/" );
}
FileUtils.copyDirectoryStructure(resourceDir, targetDir);
}
protected List<MavenProject> createReactorProjects(String path, String subpath) throws Exception
{
return createReactorProjects(path, path, subpath, true);
}
protected List<MavenProject> createReactorProjects(String path, String subpath, boolean clean) throws Exception
{
return createReactorProjects(path, path, subpath, clean);
}
protected List<MavenProject> createReactorProjectsNoClean(String path, String subpath) throws Exception
{
return createReactorProjects(path, path, subpath, false);
}
protected List<MavenProject> createReactorProjects( String path, String targetPath, String subpath, boolean clean )
throws Exception
{
File testFile = null;
if(Strings.isNullOrEmpty(subpath))
{
testFile = new File( testFileBase, "projects/" + path + "/pom.xml" );
}
else
{
testFile = new File( testFileBase, "projects/" + path + "/" + subpath + "/pom.xml" );
}
Stack<File> projectFiles = new Stack<File>();
projectFiles.push( testFile );
List<DefaultArtifactRepository> repos =
Collections.singletonList( new DefaultArtifactRepository( "central", getRemoteRepositoryURL(), new DefaultRepositoryLayout() ) );
Repository repository = new Repository();
repository.setId( "central" );
repository.setUrl( getRemoteRepositoryURL() );
ProfileManager profileManager = new DefaultProfileManager( getContainer() );
Profile profile = new Profile();
profile.setId( "profile" );
profile.addRepository( repository );
profileManager.addProfile( profile );
profileManager.activateAsDefault( profile.getId() );
List<MavenProject> reactorProjects = new ArrayList<MavenProject>();
String cleaned = "";
while ( !projectFiles.isEmpty() )
{
File file = (File) projectFiles.pop();
// Recopy the test resources since they are modified in some tests
//FileUtils.copyDirectory(srcDir,file.getParentFile());
String filePath = file.getPath();
int index = filePath.indexOf( "projects" );
filePath = filePath.substring( index ).replace( '\\', '/' );
File newFile = new File( testFileBase, StringUtils.replace( filePath, path, targetPath ) );
if(clean && !cleaned.equals(newFile.getParentFile().getName()))
{
//clean the parent dir
newFile.mkdirs();
FileUtils.cleanDirectory(newFile.getParentFile());
File srcDir = new File(getTestFile(PROJECT_BASEDIR + "src/test/resources/"),filePath).getParentFile();
FileUtils.copyDirectoryStructure(srcDir, newFile.getParentFile());
cleaned = newFile.getParentFile().getName();
}
MavenProject project = projectBuilder.build( newFile, localRepository, profileManager );
for ( Iterator i = project.getModules().iterator(); i.hasNext(); )
{
String module = (String) i.next();
File moduleFile = new File( file.getParentFile(), module);
if(moduleFile.isFile()){
projectFiles.push( moduleFile );
}else{
projectFiles.push( new File( moduleFile, "/pom.xml" ) );
}
}
reactorProjects.add( project );
}
ProjectSorter sorter = new ProjectSorter( reactorProjects );
reactorProjects = sorter.getSortedProjects();
ArtifactFactory artifactFactory = (ArtifactFactory) lookup( ArtifactFactory.ROLE );
ArtifactCollector artifactCollector = (ArtifactCollector) lookup( ArtifactCollector.class.getName() );
ArtifactMetadataSource artifactMetadataSource = (ArtifactMetadataSource) lookup( ArtifactMetadataSource.ROLE );
// pass back over and resolve dependencies - can't be done earlier as the order may not be correct
for ( Iterator i = reactorProjects.iterator(); i.hasNext(); )
{
MavenProject project = (MavenProject) i.next();
project.setRemoteArtifactRepositories( repos );
project.setPluginArtifactRepositories( repos );
Artifact projectArtifact = project.getArtifact();
Map managedVersions = createManagedVersionMap(
ArtifactUtils.versionlessKey(projectArtifact.getGroupId(), projectArtifact.getArtifactId()),
project.getDependencyManagement(), artifactFactory );
project.setDependencyArtifacts( project.createArtifacts( artifactFactory, null, null ) );
ArtifactResolutionResult result = artifactCollector.collect( project.getDependencyArtifacts(),
projectArtifact, managedVersions,
localRepository, repos, artifactMetadataSource,
null, Collections.EMPTY_LIST );
project.setArtifacts( result.getArtifacts() );
}
return reactorProjects;
}
private Map<String,Artifact> createManagedVersionMap(String projectId, DependencyManagement dependencyManagement, ArtifactFactory artifactFactory) throws ProjectBuildingException
{
Map<String,Artifact> map;
if ( dependencyManagement != null && dependencyManagement.getDependencies() != null )
{
map = new HashMap<String,Artifact>();
for ( Iterator i = dependencyManagement.getDependencies().iterator(); i.hasNext(); )
{
Dependency d = (Dependency) i.next();
try
{
VersionRange versionRange = VersionRange.createFromVersionSpec( d.getVersion() );
Artifact artifact = artifactFactory.createDependencyArtifact( d.getGroupId(), d.getArtifactId(),
versionRange, d.getType(),
d.getClassifier(), d.getScope() );
map.put( d.getManagementKey(), artifact );
}
catch ( InvalidVersionSpecificationException e )
{
throw new ProjectBuildingException( projectId, "Unable to parse version '" + d.getVersion() +
"' for dependency '" + d.getManagementKey() + "': " + e.getMessage(), e );
}
}
}
else
{
map = Collections.emptyMap();
}
return map;
}
private String getRemoteRepositoryURL() throws IOException
{
File testFile = getTestFile(PROJECT_BASEDIR + "src/test/remote-repository" );
if (testFile.getAbsolutePath().equals( testFile.getCanonicalPath() ) )
{
return "file://" + getTestFile(PROJECT_BASEDIR + "src/test/remote-repository" ).getAbsolutePath().replace( '\\', '/' );
}
return "file://" + getTestFile(PROJECT_BASEDIR + "src/test/remote-repository" ).getCanonicalPath().replace( '\\', '/' );
}
protected void comparePomFiles(List<MavenProject> reactorProjects)throws IOException
{
for (MavenProject project : reactorProjects)
{
comparePomFiles(project);
}
}
protected void compareDevPomFiles(List<MavenProject> reactorProjects)throws IOException
{
for (MavenProject project : reactorProjects)
{
compareDevPomFiles(project);
}
}
protected void compareSnapPomFiles(List<MavenProject> reactorProjects)throws IOException
{
for (MavenProject project : reactorProjects)
{
compareSnapPomFiles(project);
}
}
protected void comparePomFiles(MavenProject project) throws IOException
{
File actualFile = project.getFile();
File expectedFile = new File(actualFile.getParentFile(), "expected-nosnap-"+actualFile.getName() );
comparePomFiles(expectedFile, actualFile);
}
protected void compareDevPomFiles(MavenProject project) throws IOException
{
File actualFile = project.getFile();
File expectedFile = new File(actualFile.getParentFile(), "expected-dev-"+actualFile.getName() );
comparePomFiles(expectedFile, actualFile);
}
protected void compareSnapPomFiles(MavenProject project) throws IOException
{
File actualFile = project.getFile();
File expectedFile = new File(actualFile.getParentFile(), "expected-snap-"+actualFile.getName() );
comparePomFiles(expectedFile, actualFile);
}
protected void comparePomFiles(File expectedFile, File actualFile) throws IOException
{
String expectedPom = ReleaseUtil.readXmlFile(expectedFile);
String actualPom = ReleaseUtil.readXmlFile(actualFile);
assertEquals(expectedPom,actualPom);
}
public void updatePomVersion(File pomFile, String oldVersion,String newVersion) throws IOException
{
String xmlString = org.apache.commons.io.FileUtils.readFileToString(pomFile);
String updatedXml = org.apache.commons.lang.StringUtils.replace(xmlString, "<version>" + oldVersion + "</version>", "<version>" + newVersion + "</version>");
org.apache.commons.io.FileUtils.writeStringToFile(pomFile, updatedXml);
}
public File newTempDir()
{
File baseDir = new File(System.getProperty("java.io.tmpdir"));
String name = randomName("mvngf-");
File tmp = new File(baseDir,name);
tmp.mkdirs();
return tmp;
}
public File newDir(String name)
{
return new File(testFileBase,name);
}
/**
 * Returns a randomly named directory handle under the test file base
 * directory (the directory is not created here).
 *
 * @return the directory as a {@link File}
 */
public File newDir()
{
    String randomDirName = randomName("mvngftest");
    return newDir(randomDirName);
}
/**
 * Produces a pseudo-random name by appending a non-negative random long to
 * the given prefix. {@code Long.MIN_VALUE} (whose absolute value overflows)
 * is mapped to 0.
 *
 * @param base the name prefix
 * @return the prefixed random name
 */
private String randomName(String base)
{
    long value = random.nextLong();
    long positive = (value == Long.MIN_VALUE) ? 0L : Math.abs(value);
    return base + positive;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.validation.entities;
import java.util.UUID;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.cassandra.cql3.CQLTester;
import org.apache.cassandra.dht.ByteOrderedPartitioner;
import org.apache.cassandra.service.StorageService;
/**
 * CQL tests for user-defined types (UDTs): invalid field references, nesting
 * UDTs inside collections/tuples/other UDTs, ALTER TYPE ... ADD field
 * additions, and unset-value handling. Each test runs against CQLTester's
 * embedded node; flush() is used to verify behavior both in the memtable and
 * after the data hits sstables.
 */
public class UserTypesTest extends CQLTester
{
@BeforeClass
public static void setUpClass()
{
// Selecting partitioner for a table is not exposed on CREATE TABLE.
StorageService.instance.setPartitionerUnsafe(ByteOrderedPartitioner.instance);
}
// Referencing an undeclared field in a UDT literal must be rejected.
@Test
public void testInvalidField() throws Throwable
{
String myType = createType("CREATE TYPE %s (f int)");
createTable("CREATE TABLE %s (k int PRIMARY KEY, v frozen<" + myType + ">)");
// 's' is not a field of myType
assertInvalid("INSERT INTO %s (k, v) VALUES (?, {s : ?})", 0, 1);
}
// Regression test for CASSANDRA-8105: UDT literals that omit a nested
// frozen-UDT field must still be insertable into set and list columns.
@Test
public void testCassandra8105() throws Throwable
{
String ut1 = createType("CREATE TYPE %s (a int, b int)");
String ut2 = createType("CREATE TYPE %s (j frozen<" + KEYSPACE + "." + ut1 + ">, k int)");
createTable("CREATE TABLE %s (x int PRIMARY KEY, y set<frozen<" + KEYSPACE + "." + ut2 + ">>)");
execute("INSERT INTO %s (x, y) VALUES (1, { { k: 1 } })");
String ut3 = createType("CREATE TYPE %s (a int, b int)");
String ut4 = createType("CREATE TYPE %s (j frozen<" + KEYSPACE + "." + ut3 + ">, k int)");
createTable("CREATE TABLE %s (x int PRIMARY KEY, y list<frozen<" + KEYSPACE + "." + ut4 + ">>)");
execute("INSERT INTO %s (x, y) VALUES (1, [ { k: 1 } ])");
String ut5 = createType("CREATE TYPE %s (a int, b int)");
String ut6 = createType("CREATE TYPE %s (i int, j frozen<" + KEYSPACE + "." + ut5 + ">)");
createTable("CREATE TABLE %s (x int PRIMARY KEY, y set<frozen<" + KEYSPACE + "." + ut6 + ">>)");
execute("INSERT INTO %s (x, y) VALUES (1, { { i: 1 } })");
}
// Regression test for CASSANDRA-7684: selecting a UDT field must work when
// the frozen UDT is a clustering column and a static column is updated,
// both before and after a flush.
@Test
public void testFor7684() throws Throwable
{
String myType = createType("CREATE TYPE %s (x double)");
createTable("CREATE TABLE %s (k int, v frozen<" + myType + ">, b boolean static, PRIMARY KEY (k, v))");
execute("INSERT INTO %s(k, v) VALUES (?, {x:?})", 1, -104.99251);
execute("UPDATE %s SET b = ? WHERE k = ?", true, 1);
assertRows(execute("SELECT v.x FROM %s WHERE k = ? AND v = {x:?}", 1, -104.99251),
row(-104.99251)
);
flush();
assertRows(execute("SELECT v.x FROM %s WHERE k = ? AND v = {x:?}", 1, -104.99251),
row(-104.99251)
);
}
// Table creation must fail for non-frozen UDTs, UDTs from other keyspaces,
// and unknown types - each with a precise error message.
@Test
public void testCreateInvalidTablesWithUDT() throws Throwable
{
String myType = createType("CREATE TYPE %s (f int)");
// Using a UDT without frozen shouldn't work
assertInvalidMessage("Non-frozen User-Defined types are not supported, please use frozen<>",
"CREATE TABLE " + KEYSPACE + ".wrong (k int PRIMARY KEY, v " + KEYSPACE + '.' + myType + ")");
assertInvalidMessage("Statement on keyspace " + KEYSPACE + " cannot refer to a user type in keyspace otherkeyspace;" +
" user types can only be used in the keyspace they are defined in",
"CREATE TABLE " + KEYSPACE + ".wrong (k int PRIMARY KEY, v frozen<otherKeyspace.myType>)");
assertInvalidMessage("Unknown type " + KEYSPACE + ".unknowntype",
"CREATE TABLE " + KEYSPACE + ".wrong (k int PRIMARY KEY, v frozen<" + KEYSPACE + '.' + "unknownType>)");
}
// ALTER TYPE ... ADD: rows written before the field was added must read back
// with null for the new field, in the memtable and after flush.
@Test
public void testAlterUDT() throws Throwable
{
String myType = KEYSPACE + '.' + createType("CREATE TYPE %s (a int)");
createTable("CREATE TABLE %s (a int PRIMARY KEY, b frozen<" + myType + ">)");
execute("INSERT INTO %s (a, b) VALUES (1, {a: 1})");
assertRows(execute("SELECT b.a FROM %s"), row(1));
flush();
execute("ALTER TYPE " + myType + " ADD b int");
execute("INSERT INTO %s (a, b) VALUES (2, {a: 2, b :2})");
assertRows(execute("SELECT b.a, b.b FROM %s"),
row(1, null),
row(2, 2));
flush();
assertRows(execute("SELECT b.a, b.b FROM %s"),
row(1, null),
row(2, 2));
}
// Binding unset() to a UDT field must be rejected, both at top level and
// when the UDT is nested inside another UDT literal.
@Test
public void testUDTWithUnsetValues() throws Throwable
{
// set up
String myType = createType("CREATE TYPE %s (x int, y int)");
String myOtherType = createType("CREATE TYPE %s (a frozen<" + myType + ">)");
createTable("CREATE TABLE %s (k int PRIMARY KEY, v frozen<" + myType + ">, z frozen<" + myOtherType + ">)");
assertInvalidMessage("Invalid unset value for field 'y' of user defined type " + myType,
"INSERT INTO %s (k, v) VALUES (10, {x:?, y:?})", 1, unset());
assertInvalidMessage("Invalid unset value for field 'y' of user defined type " + myType,
"INSERT INTO %s (k, v, z) VALUES (10, {x:?, y:?}, {a:{x: ?, y: ?}})", 1, 1, 1, unset());
}
// ALTER TYPE on a UDT nested in a map column: old rows read back with null
// for the added field; rows may also omit the original field entirely.
@Test
public void testAlteringUserTypeNestedWithinMap() throws Throwable
{
// test frozen and non-frozen collections
String[] columnTypePrefixes = {"frozen<map<text, ", "map<text, frozen<"};
for (String columnTypePrefix : columnTypePrefixes)
{
String ut1 = createType("CREATE TYPE %s (a int)");
String columnType = columnTypePrefix + KEYSPACE + "." + ut1 + ">>";
createTable("CREATE TABLE %s (x int PRIMARY KEY, y " + columnType + ")");
execute("INSERT INTO %s (x, y) VALUES(1, {'firstValue':{a:1}})");
assertRows(execute("SELECT * FROM %s"), row(1, map("firstValue", userType(1))));
flush();
execute("ALTER TYPE " + KEYSPACE + "." + ut1 + " ADD b int");
execute("INSERT INTO %s (x, y) VALUES(2, {'secondValue':{a:2, b:2}})");
execute("INSERT INTO %s (x, y) VALUES(3, {'thirdValue':{a:3}})");
execute("INSERT INTO %s (x, y) VALUES(4, {'fourthValue':{b:4}})");
assertRows(execute("SELECT * FROM %s"),
row(1, map("firstValue", userType(1))),
row(2, map("secondValue", userType(2, 2))),
row(3, map("thirdValue", userType(3, null))),
row(4, map("fourthValue", userType(null, 4))));
flush();
assertRows(execute("SELECT * FROM %s"),
row(1, map("firstValue", userType(1))),
row(2, map("secondValue", userType(2, 2))),
row(3, map("thirdValue", userType(3, null))),
row(4, map("fourthValue", userType(null, 4))));
}
}
// Same as the map variant above, but with the UDT nested in a set column.
@Test
public void testAlteringUserTypeNestedWithinSet() throws Throwable
{
// test frozen and non-frozen collections
String[] columnTypePrefixes = {"frozen<set<", "set<frozen<"};
for (String columnTypePrefix : columnTypePrefixes)
{
String ut1 = createType("CREATE TYPE %s (a int)");
String columnType = columnTypePrefix + KEYSPACE + "." + ut1 + ">>";
createTable("CREATE TABLE %s (x int PRIMARY KEY, y " + columnType + ")");
execute("INSERT INTO %s (x, y) VALUES(1, {1} )");
assertRows(execute("SELECT * FROM %s"), row(1, set(userType(1))));
flush();
execute("ALTER TYPE " + KEYSPACE + "." + ut1 + " ADD b int");
execute("INSERT INTO %s (x, y) VALUES(2, {{a:2, b:2}})");
execute("INSERT INTO %s (x, y) VALUES(3, {{a:3}})");
execute("INSERT INTO %s (x, y) VALUES(4, {{b:4}})");
assertRows(execute("SELECT * FROM %s"),
row(1, set(userType(1))),
row(2, set(userType(2, 2))),
row(3, set(userType(3, null))),
row(4, set(userType(null, 4))));
flush();
assertRows(execute("SELECT * FROM %s"),
row(1, set(userType(1))),
row(2, set(userType(2, 2))),
row(3, set(userType(3, null))),
row(4, set(userType(null, 4))));
}
}
// Same as the map/set variants above, but with the UDT nested in a list.
@Test
public void testAlteringUserTypeNestedWithinList() throws Throwable
{
// test frozen and non-frozen collections
String[] columnTypePrefixes = {"frozen<list<", "list<frozen<"};
for (String columnTypePrefix : columnTypePrefixes)
{
String ut1 = createType("CREATE TYPE %s (a int)");
String columnType = columnTypePrefix + KEYSPACE + "." + ut1 + ">>";
createTable("CREATE TABLE %s (x int PRIMARY KEY, y " + columnType + ")");
execute("INSERT INTO %s (x, y) VALUES(1, [1] )");
assertRows(execute("SELECT * FROM %s"), row(1, list(userType(1))));
flush();
execute("ALTER TYPE " + KEYSPACE + "." + ut1 + " ADD b int");
execute("INSERT INTO %s (x, y) VALUES(2, [{a:2, b:2}])");
execute("INSERT INTO %s (x, y) VALUES(3, [{a:3}])");
execute("INSERT INTO %s (x, y) VALUES(4, [{b:4}])");
assertRows(execute("SELECT * FROM %s"),
row(1, list(userType(1))),
row(2, list(userType(2, 2))),
row(3, list(userType(3, null))),
row(4, list(userType(null, 4))));
flush();
assertRows(execute("SELECT * FROM %s"),
row(1, list(userType(1))),
row(2, list(userType(2, 2))),
row(3, list(userType(3, null))),
row(4, list(userType(null, 4))));
}
}
// ALTER TYPE on a UDT that is a component of a frozen tuple column.
@Test
public void testAlteringUserTypeNestedWithinTuple() throws Throwable
{
String type = createType("CREATE TYPE %s (a int, b int)");
createTable("CREATE TABLE %s (a int PRIMARY KEY, b frozen<tuple<int, " + KEYSPACE + "." + type + ">>)");
execute("INSERT INTO %s (a, b) VALUES(1, (1, {a:1, b:1}))");
assertRows(execute("SELECT * FROM %s"), row(1, tuple(1, userType(1, 1))));
flush();
execute("ALTER TYPE " + KEYSPACE + "." + type + " ADD c int");
execute("INSERT INTO %s (a, b) VALUES(2, (2, {a: 2, b: 2, c: 2}))");
execute("INSERT INTO %s (a, b) VALUES(3, (3, {a: 3, b: 3}))");
execute("INSERT INTO %s (a, b) VALUES(4, (4, {b:4}))");
assertRows(execute("SELECT * FROM %s"),
row(1, tuple(1, userType(1, 1))),
row(2, tuple(2, userType(2, 2, 2))),
row(3, tuple(3, userType(3, 3, null))),
row(4, tuple(4, userType(null, 4, null))));
flush();
assertRows(execute("SELECT * FROM %s"),
row(1, tuple(1, userType(1, 1))),
row(2, tuple(2, userType(2, 2, 2))),
row(3, tuple(3, userType(3, 3, null))),
row(4, tuple(4, userType(null, 4, null))));
}
// ALTER TYPE on a UDT nested two tuple levels deep.
@Test
public void testAlteringUserTypeNestedWithinNestedTuple() throws Throwable
{
String type = createType("CREATE TYPE %s (a int, b int)");
createTable("CREATE TABLE %s (a int PRIMARY KEY, b frozen<tuple<int, tuple<int, " + KEYSPACE + "." + type + ">>>)");
execute("INSERT INTO %s (a, b) VALUES(1, (1, (1, {a:1, b:1})))");
assertRows(execute("SELECT * FROM %s"), row(1, tuple(1, tuple(1, userType(1, 1)))));
flush();
execute("ALTER TYPE " + KEYSPACE + "." + type + " ADD c int");
execute("INSERT INTO %s (a, b) VALUES(2, (2, (1, {a: 2, b: 2, c: 2})))");
execute("INSERT INTO %s (a, b) VALUES(3, (3, (1, {a: 3, b: 3})))");
execute("INSERT INTO %s (a, b) VALUES(4, (4, (1, {b:4})))");
assertRows(execute("SELECT * FROM %s"),
row(1, tuple(1, tuple(1, userType(1, 1)))),
row(2, tuple(2, tuple(1, userType(2, 2, 2)))),
row(3, tuple(3, tuple(1, userType(3, 3, null)))),
row(4, tuple(4, tuple(1, userType(null, 4, null)))));
flush();
assertRows(execute("SELECT * FROM %s"),
row(1, tuple(1, tuple(1, userType(1, 1)))),
row(2, tuple(2, tuple(1, userType(2, 2, 2)))),
row(3, tuple(3, tuple(1, userType(3, 3, null)))),
row(4, tuple(4, tuple(1, userType(null, 4, null)))));
}
// ALTER TYPE on a UDT that is a field of another (frozen) UDT.
@Test
public void testAlteringUserTypeNestedWithinUserType() throws Throwable
{
String type = createType("CREATE TYPE %s (a int, b int)");
String otherType = createType("CREATE TYPE %s (x frozen<" + KEYSPACE + "." + type + ">)");
createTable("CREATE TABLE %s (a int PRIMARY KEY, b frozen<" + KEYSPACE + "." + otherType + ">)");
execute("INSERT INTO %s (a, b) VALUES(1, {x: {a:1, b:1}})");
assertRows(execute("SELECT b.x.a, b.x.b FROM %s"), row(1, 1));
flush();
execute("ALTER TYPE " + KEYSPACE + "." + type + " ADD c int");
execute("INSERT INTO %s (a, b) VALUES(2, {x: {a: 2, b: 2, c: 2}})");
execute("INSERT INTO %s (a, b) VALUES(3, {x: {a: 3, b: 3}})");
execute("INSERT INTO %s (a, b) VALUES(4, {x: {b:4}})");
assertRows(execute("SELECT b.x.a, b.x.b, b.x.c FROM %s"),
row(1, 1, null),
row(2, 2, 2),
row(3, 3, null),
row(null, 4, null));
flush();
assertRows(execute("SELECT b.x.a, b.x.b, b.x.c FROM %s"),
row(1, 1, null),
row(2, 2, 2),
row(3, 3, null),
row(null, 4, null));
}
/**
 * Migrated from cql_tests.py:TestCQL.user_types_test()
 */
@Test
public void testUserTypes() throws Throwable
{
UUID userID_1 = UUID.fromString("550e8400-e29b-41d4-a716-446655440000");
String addressType = createType("CREATE TYPE %s (street text, city text, zip_code int, phones set<text >)");
String nameType = createType("CREATE TYPE %s (firstname text, lastname text)");
createTable("CREATE TABLE %s (id uuid PRIMARY KEY, name frozen < " + nameType + " >, addresses map < text, frozen < " + addressType + " >> )");
execute("INSERT INTO %s (id, name) VALUES(?, { firstname: 'Paul', lastname: 'smith' } )", userID_1);
assertRows(execute("SELECT name.firstname FROM %s WHERE id = ?", userID_1), row("Paul"));
execute("UPDATE %s SET addresses = addresses + { 'home': { street: '...', city:'SF', zip_code:94102, phones:{ } } } WHERE id = ?", userID_1);
// TODO: deserialize the value here and check it 's right.
execute("SELECT addresses FROM %s WHERE id = ? ", userID_1);
}
/**
 * Test user type test that does a little more nesting,
 * migrated from cql_tests.py:TestCQL.more_user_types_test()
 */
@Test
public void testNestedUserTypes() throws Throwable
{
String type1 = createType("CREATE TYPE %s ( s set<text>, m map<text, text>, l list<text>)");
String type2 = createType("CREATE TYPE %s ( s set < frozen < " + type1 + " >>,)");
createTable("CREATE TABLE %s (id int PRIMARY KEY, val frozen<" + type2 + ">)");
execute("INSERT INTO %s (id, val) VALUES (0, { s : {{ s : {'foo', 'bar'}, m : { 'foo' : 'bar' }, l : ['foo', 'bar']} }})");
// TODO: check result once we have an easy way to do it. For now we just check it doesn't crash
execute("SELECT * FROM %s");
}
/**
 * Migrated from cql_tests.py:TestCQL.add_field_to_udt_test()
 */
@Test
public void testAddFieldToUdt() throws Throwable
{
String typeName = createType("CREATE TYPE %s (fooint int, fooset set <text>)");
createTable("CREATE TABLE %s (key int PRIMARY KEY, data frozen <" + typeName + ">)");
execute("INSERT INTO %s (key, data) VALUES (1, {fooint: 1, fooset: {'2'}})");
execute("ALTER TYPE " + keyspace() + "." + typeName + " ADD foomap map <int,text>");
execute("INSERT INTO %s (key, data) VALUES (1, {fooint: 1, fooset: {'2'}, foomap: {3 : 'bar'}})");
}
}
| |
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* BPS Bildungsportal Sachsen GmbH, http://www.bps-system.de
* <p>
*/
package de.bps.webservices.clients.onyxreporter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import org.apache.commons.io.IOUtils;
import org.olat.core.id.Identity;
import org.olat.core.id.OLATResourceable;
import org.olat.core.id.UserConstants;
import org.olat.core.logging.OLog;
import org.olat.core.logging.Tracing;
import org.olat.core.util.WebappHelper;
import org.olat.course.nodes.AssessableCourseNode;
import org.olat.course.nodes.CourseNode;
import org.olat.course.nodes.iq.IQEditController;
import org.olat.fileresource.FileResourceManager;
import org.olat.ims.qti.QTIResultSet;
import org.olat.repository.RepositoryEntry;
import de.bps.onyx.plugin.OnyxModule;
import de.bps.onyx.plugin.OnyxResultManager;
import de.bps.security.SSLConfigurationModule;
//<ONYX-705>
public class OnyxReporterConnector {
//<OLATCE-1124>
private static final String FOUR = "4";
private static final String ONE = "1";
private static final String FIVE = "5";
//</OLATCE-1124>
//<OLATCE-1089>
private static final String NONAME = "NONAME";
//</OLATCE-1089>
private String surveyFolderPath;
private final static OLog log = Tracing.createLoggerFor(OnyxReporterConnector.class);
private final OnyxReporterClient connector;
private final Pattern pattern = Pattern.compile("(.*v)(.*)\\.(xml|zip)");
public OnyxReporterConnector() throws OnyxReporterException {
//TODO check if available
if(isServiceAvailable(OnyxReporterTarget.getTarget())) {
connector = new OnyxReporterClient();
} else {
//<OLATCE-1124>
log.error("OnyxReporterService is unavailable! Tried to use: "+OnyxReporterTarget.getTarget());
//</OLATCE-1124>
throw new OnyxReporterException("Unable to connect to OnyxReporter");
}
}
/**
* Delivers a map with all possible outcome-variables of this onyx test.
* @param node The course node with the Onyx test.
* @return A map with all outcome-variables with name as key and type as value.
*/
//<OLATCE-1012> split the method
// <OLATBPS-363>
public Map<String, String> getPossibleOutcomeVariables(CourseNode node) throws OnyxReporterException {
    // Delegate to the RepositoryEntry-based variant using the node's referenced test.
    return getPossibleOutcomeVariables(node.getReferencedRepositoryEntry());
}
//<OLATCE-1012>
//<OLATCE-1012>
/**
* Delivers a map with all possible outcome-variables of this onyx test.
* @param Repoentry with the Onyx test.
* @return A map with all outcome-variables with name as key and type as value.
*/
/**
 * Asks the reporter service for all outcome variables of the Onyx test behind
 * the given repository entry.
 *
 * @param entry the repository entry containing the Onyx test
 * @return variable name to type; empty on a reporter conversation error
 */
public Map<String, String> getPossibleOutcomeVariables(RepositoryEntry entry) throws OnyxReporterException {
    final HashMapWrapper resultVariables =
            connector.getService().getResultVariables(1, getContentPackage(entry), new HashMapWrapper());
    try {
        return resultVariables.getMap();
    } catch (OnyxReporterException e) {
        log.error("Error in getPossibleOutcomeVariables reporter conversation! RepositoryEntry: " + entry.getResourceableId(), e);
        return new HashMap<String, String>();
    }
}
//<OLATCE-1012>
// <OLATBPS-363>
public Map<String, String> getResults(File resultXml, CourseNode node) throws OnyxReporterException {
    // No identity available in this variant; the three-argument overload tolerates a null identity.
    return getResults(resultXml, node, null);
}
public Map<String, String> getResults(AssessableCourseNode node, Identity identity) throws OnyxReporterException {
    // Resolve the latest result file (assessment id 0) for this user and node.
    final String assessmentType = node.getModuleConfiguration().get(IQEditController.CONFIG_KEY_TYPE).toString();
    final File resultXml = getResultFile(identity.getName(), assessmentType, node, 0);
    return getResults(resultXml, node, identity);
}
// <OLATBPS-363>
/**
 * Fetches the outcome values for one student's result file: arms the reporter,
 * initiates a site with the student's results, then reads back the
 * result-variable map.
 *
 * @param resultXml the student's result file; may be null, in which case an
 *                  empty map is returned
 * @param node      the course node referencing the Onyx test
 * @param identity  the student (used for logging and the student record sent
 *                  to the reporter)
 * @return outcome-variable name to value
 * @throws OnyxReporterException if the reporter conversation fails
 */
public Map<String, String> getResults(File resultXml, CourseNode node, Identity identity) throws OnyxReporterException {
// </OLATBPS-363>
//<OLATCE-1073>
if(resultXml == null){
log.info("Missing resultFile! For "+(identity!=null?identity.getName():"NULL")+" node : "+(node!=null?(node.getShortName()+":"+node.getIdent()):"NULL"));
return new HashMap<String, String>(0);
}
//</OLATCE-1073>
RepositoryEntry repositoryEntry = node.getReferencedRepositoryEntry();
OnyxReporterServices reporterService = connector.getService();
/** ARM SITE **/
// armSite returns {secret, sessionId}; both are required for every following call.
String[] dlh = armSite(reporterService, identity, ReporterRole.AUTHOR);
String secret = dlh[0];
String sessionId = dlh[1];
/** Prepare data **/
ArrayList<ResultsForStudent> resForStudents = new ArrayList<ResultsForStudent>(4);
resForStudents.add(getStudentWithResult(identity, resultXml));
ResultsForStudentsWrapper wrapper = new ResultsForStudentsWrapper();
wrapper.setStudents(resForStudents);
/** INITIATE SITE **/
// The site must be initiated with the student's results before result values can be queried.
reporterService.initiateSite(1, sessionId, secret, wrapper, getContentPackage(repositoryEntry), new HashMapWrapper());
HashMapWrapper mapWrapper = reporterService.getResultValues(1, sessionId, secret, new HashMapWrapper(), new HashMapWrapper());
Map<String, String> results;
try {
results = mapWrapper.getMap();
} catch (OnyxReporterException e) {
log.error("Error in getResults reporter conversation! Session: " + sessionId + ", Identity: " + identity.getName(), e);
throw new OnyxReporterException("Error getting results for test! Session: " + sessionId, e);
}
return results;
}
/**
* This method starts the OnyxReporter and returns the link to it.
*
* @param students
* The students to show the results for.
* @param node
* The AssessableCourseNode to get the nodeId and to get the
* (OnyxTest) RepositoryEntry.
* @param role
* defines for which role and which resulting view the reporter
* should be called
* @param ureq
* The UserRequest for getting the identity and role of the
* current user.
* @return the Link to the reporter.
*/
public String startReporterGUI(Identity caller, List<Identity> students, CourseNode node, Long assessmentId, ReporterRole role)
throws OnyxReporterException {
String link = "";
RepositoryEntry repositoryEntry = node.getReferencedRepositoryEntry();
ArrayList<ResultsForStudent> resForStudents = null;
// surveyFolderPath is only set by startReporterGUIForSurvey(); in that case
// anonymized results are read from the survey folder instead of per student.
if(surveyFolderPath == null){
resForStudents = getStudentsWithResults(students, node, assessmentId);
} else {
resForStudents = getAnonymizedStudentsWithResultsForSurvey(node.getIdent());
}
try {
OnyxReporterServices reporterService = connector.getService();
HashMapWrapper mapWrapper = new HashMapWrapper();
// The assessment view needs the assessment id and provider as extra parameters.
if (ReporterRole.ASSESSMENT == role) {
HashMap<String, String> underlyingMap = new HashMap<String, String>();
if (assessmentId != null) {
underlyingMap.put("assessmentID", String.valueOf(assessmentId));
}
underlyingMap.put("providerID", OnyxModule.getConfigName());
mapWrapper.setMap(underlyingMap);
}
// armSite returns {secret, sessionId}; both are appended to the link below.
String[] dlh = armSite(reporterService, caller, role, mapWrapper);
byte[] contentPackage = getContentPackage(repositoryEntry);
ResultsForStudentsWrapper wrapper = new ResultsForStudentsWrapper();
wrapper.setStudents(resForStudents);
link = reporterService.initiateSite(1, dlh[1], dlh[0], wrapper, contentPackage, mapWrapper);
if (link == null) {
throw new OnyxReporterException("Unable to start ReporterGUI! Could not resolve reporter URL!");
} else if (link.indexOf("reportererror") >= 0) {
// use error link to show reporter error page
} else {
//<OLATCE-1124>
// The digit appended to the link selects the reporter view.
if (ReporterRole.REPORTING == role) {
link += FIVE; // view 5 for reporting view / statistical evaluation
} else if (ReporterRole.STUDENT == role) {
link += ONE; // view 1 (single learner view)
} else {
link += FOUR; // view 4 (all learners overview)
}
//</OLATCE-1124>
//add params
link += "?sid=" + dlh[1] + "&secret=" + dlh[0];
//switch to the student view of a specified student
if (ReporterRole.STUDENT == role) {
//link += "&uid="+ students.get(0).getKey();
// NOTE(review): the student id sent to the reporter is the assessment id
// (see getStudentWithResult), so "uid" is the assessment id here - confirm.
link += "&uid=" + assessmentId;
}
// add language information
final String lang = caller.getUser().getPreferences().getLanguage();
if (lang != null && !lang.isEmpty()) {
link += "&lang=" + lang;
}
}
} catch (Exception e) {
throw new OnyxReporterException("Unable to start ReporterGUI!", e);
}
return link;
}
// <OLATCE-498>
public boolean hasResults(String username, String assessmentType, CourseNode node) {
    // A resolvable result file (assessment id 0 = "latest") means results exist.
    return null != getResultFile(username, assessmentType, node, 0);
}
// </OLATCE-498>
/**
 * Starts the reporter GUI in reporting mode over anonymized survey results.
 *
 * @param caller      the user opening the reporter
 * @param node        the course node of the survey
 * @param resultsPath folder containing the survey result files
 * @return the link to the reporter GUI
 * @throws OnyxReporterException if the reporter could not be started
 */
public String startReporterGUIForSurvey(Identity caller, CourseNode node, String resultsPath) throws OnyxReporterException {
    // Remembering the survey folder makes startReporterGUI read anonymized
    // results from disk instead of resolving them per student.
    surveyFolderPath = resultsPath;
    return startReporterGUI(caller, null, node, null, ReporterRole.REPORTING);
}
/**
 * Loads the Onyx test content package (zip) of the repository entry into a
 * byte array. Falls back to {@link #getCP(RepositoryEntry)} when the file
 * resource cannot be resolved directly. Read errors are logged; the returned
 * array may then be partially filled.
 *
 * @param repositoryEntry the entry whose test package is loaded
 * @return the content-package bytes
 */
private byte[] getContentPackage(RepositoryEntry repositoryEntry) {
    File cpFile = FileResourceManager.getInstance().getFileResource(repositoryEntry.getOlatResource());
    if (cpFile == null || !cpFile.exists()) {
        cpFile = getCP(repositoryEntry);
    }
    byte[] contentPackage = new byte[(int) cpFile.length()];
    java.io.FileInputStream inp = null;
    try {
        inp = new java.io.FileInputStream(cpFile);
        // InputStream.read(byte[]) may return before the buffer is full; the
        // previous single read could truncate larger packages. Loop until done.
        int offset = 0;
        while (offset < contentPackage.length) {
            final int read = inp.read(contentPackage, offset, contentPackage.length - offset);
            if (read < 0) {
                break; // unexpected EOF: file shrank after length() was sampled
            }
            offset += read;
        }
    } catch (FileNotFoundException e) {
        log.error("Missing file: " + cpFile.getAbsolutePath(), e);
    } catch (IOException e) {
        log.error("Error copying file: " + cpFile.getAbsolutePath(), e);
    } finally {
        IOUtils.closeQuietly(inp);
    }
    return contentPackage;
}
private String[] armSite(OnyxReporterServices reporterService, Identity caller, ReporterRole role) {
    // Convenience overload: arm the site without extra reporter parameters.
    return armSite(reporterService, caller, role, new HashMapWrapper());
}
/**
 * Arms the reporter site for the given user and role.
 *
 * @return {@code {secret, sessionId}}; sessionId is "dummy" when the service
 *         returned none
 */
private String[] armSite(OnyxReporterServices reporterService, Identity caller, ReporterRole role, HashMapWrapper wrapper) {
    final String secret = "" + new Random().nextLong();
    // The reporter requires non-empty names; substitute a placeholder if unset.
    String lastname = caller.getUser().getProperty(UserConstants.LASTNAME, null);
    if (lastname == null || lastname.length() == 0) {
        lastname = NONAME;
    }
    String firstname = caller.getUser().getProperty(UserConstants.FIRSTNAME, null);
    if (firstname == null || firstname.length() == 0) {
        firstname = NONAME;
    }
    String reporterSessionId = reporterService.armSite(1, caller.getName(), role.getKey(), secret,
            lastname, firstname, wrapper);
    if (reporterSessionId == null) {
        reporterSessionId = "dummy";
    }
    return new String[] { secret, reporterSessionId };
}
/**
 * Builds the reporter's per-student record: display name, a student id derived
 * from the result file name, and the raw result file bytes.
 *
 * @param student    the student the results belong to
 * @param resultFile the student's result file
 * @return the populated record, or null when the file could not be read
 */
private ResultsForStudent getStudentWithResult(Identity student, File resultFile) {
    ResultsForStudent resForStudent = null;
    byte[] resultFileStream = new byte[(int) resultFile.length()];
    java.io.FileInputStream inp = null;
    try {
        inp = new java.io.FileInputStream(resultFile);
        // InputStream.read(byte[]) may stop short of filling the buffer; the
        // previous single read could truncate larger files. Loop until done.
        int offset = 0;
        while (offset < resultFileStream.length) {
            final int read = inp.read(resultFileStream, offset, resultFileStream.length - offset);
            if (read < 0) {
                break; // unexpected EOF: file shrank after length() was sampled
            }
            offset += read;
        }
        //<OLATCE-1089>
        // The reporter requires non-empty names; substitute a placeholder if unset.
        String lastname = student.getUser().getProperty(UserConstants.LASTNAME, null);
        String firstname = student.getUser().getProperty(UserConstants.FIRSTNAME, null);
        lastname = lastname != null && lastname.length() > 0 ? lastname : NONAME;
        firstname = firstname != null && firstname.length() > 0 ? firstname : NONAME;
        resForStudent = new ResultsForStudent();
        //<OLATCE-1169>
        resForStudent.setFirstname(firstname);
        resForStudent.setLastname(lastname);
        //</OLATCE-1169>
        //</OLATCE-1089>
        // The student id sent to the reporter is the assessment id parsed from
        // the file name ("...v<assessmentId>.xml|zip"); fall back to the identity key.
        String filename = resultFile.getName();
        Matcher matcher = pattern.matcher(filename);
        String assessmentId = null;
        if (matcher.matches()) {
            assessmentId = matcher.group(2);
        } else {
            log.warn("Could not determine assessment ID from unexpected file name " + filename);
            assessmentId = String.valueOf(student.getKey());
        }
        resForStudent.setStudentId(assessmentId);
        resForStudent.setGroupname("");
        resForStudent.setTutorname("");
        resForStudent.setResultsFile(resultFileStream);
    } catch (FileNotFoundException e) {
        log.error("Missing file: " + resultFile.getAbsolutePath(), e);
    } catch (IOException e) {
        log.error("Error copying file: " + resultFile.getAbsolutePath(), e);
    } finally {
        IOUtils.closeQuietly(inp);
    }
    return resForStudent;
}
/**
 * Collects the reporter records of all students that actually have a result
 * file; students without one are skipped.
 */
private ArrayList<ResultsForStudent> getStudentsWithResults(List<Identity> students, CourseNode node, Long assessmentId) {
    final long id = (assessmentId != null) ? assessmentId.longValue() : 0L;
    final String assessmentType = node.getModuleConfiguration().get(IQEditController.CONFIG_KEY_TYPE).toString();
    final ArrayList<ResultsForStudent> resForStudents = new ArrayList<ResultsForStudent>();
    for (Identity student : students) {
        final File resultFile = getResultFile(student.getName(), assessmentType, node, id);
        //<OLATCE-1048>
        if (resultFile != null) {
            resForStudents.add(getStudentWithResult(student, resultFile));
        }
        //</OLATCE-1048>
    }
    return resForStudents;
}
/**
* For every result xml file found in the survey folder a dummy student is created.
* @param nodeId
* @return
*/
private ArrayList<ResultsForStudent> getAnonymizedStudentsWithResultsForSurvey(String nodeId) {
    ArrayList<ResultsForStudent> serviceStudents = new ArrayList<ResultsForStudent>();
    File directory = new File(this.surveyFolderPath);
    if (directory.exists()) {
        String[] allXmls = directory.list(new OnyxReporterConnectorFileNameFilter(nodeId));
        if (allXmls != null && allXmls.length > 0) {
            int id = 0;
            for (String xmlFileName : allXmls) {
                // Anonymized placeholder record - only the synthetic id and the
                // raw result bytes matter for surveys.
                ResultsForStudent serviceStudent = new ResultsForStudent();
                serviceStudent.setFirstname("");
                serviceStudent.setLastname("");
                serviceStudent.setGroupname("");
                serviceStudent.setTutorname("");
                serviceStudent.setStudentId("st" + id);
                // NOTE(review): assumes surveyFolderPath ends with a separator - confirm callers.
                File resultFile = new File(this.surveyFolderPath + xmlFileName);
                byte[] resultFileStream = new byte[(int) resultFile.length()];
                java.io.FileInputStream inp = null;
                try {
                    inp = new java.io.FileInputStream(resultFile);
                    // Loop: read(byte[]) is not guaranteed to fill the buffer in one call.
                    int offset = 0;
                    while (offset < resultFileStream.length) {
                        final int read = inp.read(resultFileStream, offset, resultFileStream.length - offset);
                        if (read < 0) {
                            break; // unexpected EOF
                        }
                        offset += read;
                    }
                    serviceStudent.setResultsFile(resultFileStream);
                    serviceStudents.add(serviceStudent);
                    id++;
                } catch (FileNotFoundException e) {
                    log.error("Missing file: " + resultFile.getAbsolutePath(), e);
                } catch (IOException e) {
                    log.error("Error copying file: " + resultFile.getAbsolutePath(), e);
                } finally {
                    // The stream was previously leaked; close it like the sibling readers do.
                    IOUtils.closeQuietly(inp);
                }
            }
        }
    }
    return serviceStudents;
}
/**
 * Builds the relative result path {@code <resreporting>/<user>/<type>/}
 * (relative to the user-data root).
 *
 * @param username       the user whose results are addressed
 * @param assessmentType the assessment type segment
 * @return the relative path, ending with a separator
 */
public static String getFilePath(String username, String assessmentType) {
    // The former "new File(WebappHelper.getUserDataRoot());" statement discarded
    // its result and had no effect; removed.
    return OnyxResultManager.getResReporting() + File.separator + username + File.separator
            + assessmentType + File.separator;
}
/**
 * Resolves the result file for a QTI result set, or null when either argument
 * is missing.
 */
public static File getResultFileOrNull(QTIResultSet set, CourseNode node) {
    if (set == null || node == null) {
        return null;
    }
    final String assessmentType = node.getModuleConfiguration().get(IQEditController.CONFIG_KEY_TYPE).toString();
    return getResultFileOrNull(set.getIdentity().getName(), assessmentType, node, set.getAssessmentID());
}
/**
 * Resolves the result file for a concrete assessment id: the ZIP result if it
 * exists, otherwise a handle to the legacy XML result (which may not exist).
 * Returns null when no assessment id is given.
 */
private static File getResultFileOrNull(String username, String assessmentType, CourseNode node, long assessmentId) {
    if (assessmentId == 0) {
        return null; // no id given - the caller falls back to directory scanning
    }
    final String path = getFilePath(username, assessmentType);
    final String prefix = OnyxResultManager.getResultsFilenamePrefix(path, node, assessmentId);
    File result = new File(WebappHelper.getUserDataRoot(), prefix + OnyxResultManager.SUFFIX_ZIP);
    if (!result.exists()) {
        // fall back to the "old" XML implementation
        result = new File(WebappHelper.getUserDataRoot(), prefix + OnyxResultManager.SUFFIX_XML);
    }
    return result;
}
/**
 * Resolves the result file for the given user/assessment. If an exact file for
 * assessmentId exists it is used; otherwise the user's result directory is
 * scanned for the newest non-suspended file for this node, preferring ZIP over
 * XML. Returns null when nothing usable is found.
 */
private File getResultFile(String username, String assessmentType, CourseNode node, long assessmentId) {
File fUserdataRoot = new File(WebappHelper.getUserDataRoot());
String path = getFilePath(username, assessmentType);
File xml = getResultFileOrNull(username, assessmentType, node, assessmentId);
//otherwise search the newest result file with this node id in this directory
if (xml == null || !(xml.exists())) {
File directory = new File(fUserdataRoot, path);
String[] allXmls = directory.list(new OnyxReporterConnectorFileNameFilter(node.getIdent()));
if (allXmls != null && allXmls.length > 0) {
// NOTE(review): newestXml starts as the first listed file even if that file
// is suspended or a ZIP - confirm this fallback is intended.
File newestXml = new File(fUserdataRoot, path + allXmls[0]);
File newestZip = null;
/*
* Search for newest file in array. If ZIP files are found,
* prefer them. Use XML files otherwise.
*/
for (String xmlFileName : allXmls) {
File xmlFile = new File(fUserdataRoot, path + xmlFileName);
Matcher matcher = pattern.matcher(xmlFileName);
String currentAssessmentId = null;
if (matcher.matches()) {
// group(2) of the file-name pattern is the assessment id.
currentAssessmentId = matcher.group(2);
QTIResultSet resultSet = OnyxResultManager.getResultSet(Long.parseLong(currentAssessmentId));
// Suspended result sets are still in progress and must be skipped.
if (resultSet != null && !resultSet.getSuspended()) {
if (xmlFileName.endsWith(OnyxResultManager.SUFFIX_ZIP)) {
if (newestZip == null) {
newestZip = xmlFile;
} else {
if (xmlFile.lastModified() > newestZip.lastModified()) {
newestZip = xmlFile;
}
}
} else if (xmlFile.lastModified() > newestXml.lastModified()) {
newestXml = xmlFile;
}
} else {
log.info("Skip suspended result : " + xmlFile);
}
}
}
if (newestZip != null) {
xml = newestZip;
} else {
xml = newestXml;
}
}
}
if (xml == null || !(xml.exists())) {
//<OLATCE-1048>
xml = null;
//</OLATCE-1048>
//log.error("There is no file for this test and student "+username+" assessmentType: "+assessmentType+ " nodeId: "+nodeId+" assessmentId: "+assessmentId);
}
return xml;
}
/**
 * Generates a file object pointing to the content package (the Onyx test
 * zip file) of the given repository entry.
 *
 * @param repositoryEntry the repository entry whose test archive is looked up
 * @return the zip file of the test; falls back to {@code "repo.zip"} in the
 *         same directory when the named archive does not exist (the returned
 *         file may still be non-existent)
 */
private File getCP(RepositoryEntry repositoryEntry) {
	//get content-package (= onyx test zip-file)
	OLATResourceable fileResource = repositoryEntry.getOlatResource();
	String unzipedDir = FileResourceManager.getInstance().unzipFileResource(fileResource).getAbsolutePath();
	String zipdirName = FileResourceManager.ZIPDIR;
	String testName = repositoryEntry.getResourcename();
	// NOTE(review): assumes unzipedDir always contains ZIPDIR; if indexOf()
	// returned -1 the substring call would throw — confirm against
	// FileResourceManager.unzipFileResource's contract
	String pathToFile = unzipedDir.substring(0, unzipedDir.indexOf(zipdirName));
	File onyxTestZip = new File(pathToFile + testName);
	// <OLATCE-499>
	if (!onyxTestZip.exists()) {
		onyxTestZip = new File(pathToFile + "repo.zip");
	}
	// </OLATCE-499>
	return onyxTestZip;
}
/**
 * Checks whether the web service behind {@code target} is reachable by
 * requesting its WSDL ({@code target + "?wsdl"}) and testing for HTTP 200.
 * For HTTPS targets a socket factory backed by the application's configured
 * key/trust managers is installed on the connection.
 *
 * @param target base URL of the web service endpoint
 * @return {@code true} if the WSDL request answered with HTTP 200,
 *         {@code false} on any other response or on any error
 */
private boolean isServiceAvailable(String target) {
	// NOTE(review): this installs a JVM-global default hostname verifier as a
	// side effect of an availability probe; kept for backward compatibility,
	// but consider setting the verifier per-connection instead.
	HostnameVerifier hv = new HostnameVerifier() {
		@Override
		public boolean verify(String urlHostName, SSLSession session) {
			return urlHostName.equals(session.getPeerHost());
		}
	};
	HttpsURLConnection.setDefaultHostnameVerifier(hv);
	HttpURLConnection con = null;
	try {
		URL url = new URL(target + "?wsdl");
		con = (HttpURLConnection) url.openConnection();
		if (con instanceof HttpsURLConnection) {
			HttpsURLConnection sslconn = (HttpsURLConnection) con;
			// "TLS" replaces the obsolete/insecure "SSL" protocol family
			SSLContext context = SSLContext.getInstance("TLS");
			context.init(SSLConfigurationModule.getKeyManagers(),
					SSLConfigurationModule.getTrustManagers(),
					new java.security.SecureRandom());
			sslconn.setSSLSocketFactory(context.getSocketFactory());
		}
		con.connect();
		return con.getResponseCode() == HttpURLConnection.HTTP_OK;
	} catch (Exception e) {
		Tracing.createLoggerFor(getClass()).error("Error while trying to connect to webservice: " + target, e);
	} finally {
		// previously the connection was only disconnected on HTTP 200,
		// leaking it on every other response
		if (con != null) {
			con.disconnect();
		}
	}
	return false;
}
//<OLATCE-1124>
/**
 * Checks whether any result files exist for the given node.
 * For surveys the configured survey folder is scanned directly; otherwise the
 * per-student result-reporting directories are scanned until the first match.
 *
 * @param forSurvey    {@code true} to look in {@code surveyFolder} instead of
 *                     per-student directories
 * @param forStudents  students whose result directories are checked (ignored
 *                     in survey mode)
 * @param surveyFolder absolute path of the survey result folder (survey mode)
 * @param node         course node whose ident filters the candidate files
 * @return {@code true} as soon as one matching result file is found
 */
public boolean hasAnyResults(boolean forSurvey, List<Identity> forStudents, String surveyFolder, CourseNode node) {
	boolean hasResults = false;
	FilenameFilter filter = new OnyxReporterConnectorFileNameFilter(node.getIdent());
	File directory = new File(WebappHelper.getUserDataRoot());
	if (forSurvey) {
		File surveyDir = new File(surveyFolder);
		if (surveyDir.exists()) {
			String[] allXmls = surveyDir.list(filter);
			if (allXmls != null && allXmls.length > 0) {
				hasResults = true;
			}
		}
	} else {
		String assessmentType = node.getModuleConfiguration().get(IQEditController.CONFIG_KEY_TYPE).toString();
		for (Identity student : forStudents) {
			String path = OnyxResultManager.getResReporting() + File.separator + student.getName() + File.separator
					+ assessmentType + File.separator;
			File studentDir = new File(directory, path);
			// new File(...) never returns null — only the existence check is needed
			if (studentDir.exists()) {
				String[] allXmls = studentDir.list(filter);
				if (allXmls != null && allXmls.length > 0) {
					hasResults = true;
					break;
				}
			}
		}
	}
	return hasResults;
}
//</OLATCE-1124>
}
//</ONYX-705>
| |
/*
QuickFind (http://quickfind.sourceforge.net/)
Cross-platform Java application for searching files in your Computer.
Copyright (c) 2010, 2013 Vasantkumar Mulage
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the QuickFind nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.quickfind.gui;
import java.awt.Color;
import java.awt.Desktop;
import java.awt.Toolkit;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.logging.Level;
import java.util.logging.Logger;
import net.quickfind.config.PropertyPage;
/*
 * QuickFindSplash.java
 * @author Copyright (C) 2010 Vasantkumar Mulage
 */
/**
 * Splash window shown while QuickFind starts up. Displays the product name,
 * version, home-page link (clickable) and a "Loading..." indicator.
 */
public class QuickFindSplash extends javax.swing.JFrame {

    /*
     * Exception Logger
     */
    private final static Logger LOGGER = Logger.getLogger(QuickFindSplash.class.getName());

    /*
     * Creates QuickFindSplash
     */
    public QuickFindSplash() {
        setUndecorated(true);
        this.setIconImage(Toolkit.getDefaultToolkit().getImage(getClass().getResource(PropertyPage.IMAGES_PATH + PropertyPage.SYSTEM_ICON)));
        initComponents();
        pack();
        // center the splash on the screen; the previous code passed `this`,
        // which does not center relative to the screen as intended
        this.setLocationRelativeTo(null);
        productNameLabel.grabFocus();
        productVersionValueLabel.setText(PropertyPage.PRODUCT_VERSION);
        homePageValueLabel.setText(PropertyPage.WEBSITE);
        mainPanel.setBackground(Color.WHITE);
    }

    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        mainPanel = new javax.swing.JPanel();
        spashImageLabel = new javax.swing.JLabel();
        productVersionLabel = new javax.swing.JLabel();
        productNameLabel = new javax.swing.JLabel();
        productVersionValueLabel = new javax.swing.JLabel();
        homePageLabel = new javax.swing.JLabel();
        homePageValueLabel = new javax.swing.JLabel();
        loadingLabel = new javax.swing.JLabel();
        splashUnderlineLabel = new javax.swing.JLabel();
        jLabel1 = new javax.swing.JLabel();
        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
        setTitle("QuickFind");
        setBackground(java.awt.Color.white);
        setForeground(java.awt.Color.white);
        setResizable(false);
        mainPanel.setBackground(java.awt.Color.white);
        mainPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(""));
        spashImageLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource(PropertyPage.IMAGES_PATH + PropertyPage.SPLASH_ICON)));
        productVersionLabel.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
        productVersionLabel.setText("Product Version:");
        productNameLabel.setFont(new java.awt.Font("Times New Roman", 1, 24)); // NOI18N
        productNameLabel.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
        productNameLabel.setText("QuickFind");
        productNameLabel.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        productVersionValueLabel.setFont(new java.awt.Font("Tahoma", 0, 14)); // NOI18N
        productVersionValueLabel.setText("<html> \n<font size='4'>1.0.0.1</font>\n<font size='2'>BETA</font>\n</html>");
        homePageLabel.setFont(new java.awt.Font("Tahoma", 1, 11));
        homePageLabel.setText("Home Page:");
        // fixed malformed href: the original emitted a literal backslash
        // inside the HTML attribute (href=\"http://...)
        homePageValueLabel.setText("<html><a href=\"http://quickfind.sf.net/\">http://quickfind.sf.net/</a></html>");
        homePageValueLabel.setCursor(new java.awt.Cursor(java.awt.Cursor.HAND_CURSOR));
        homePageValueLabel.setName("http://quickfind.sf.net/"); // NOI18N
        homePageValueLabel.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                homePageValueLabelMouseClicked(evt);
            }
        });
        loadingLabel.setFont(new java.awt.Font("Tahoma", 0, 14));
        loadingLabel.setText("Loading...");
        splashUnderlineLabel.setText("______________________________________");
        jLabel1.setFont(new java.awt.Font("Arial Rounded MT Bold", 0, 11)); // NOI18N
        jLabel1.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
        jLabel1.setText("Instant File Search");
        jLabel1.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        javax.swing.GroupLayout mainPanelLayout = new javax.swing.GroupLayout(mainPanel);
        mainPanel.setLayout(mainPanelLayout);
        mainPanelLayout.setHorizontalGroup(
            mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(mainPanelLayout.createSequentialGroup()
                .addGroup(mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
                    .addComponent(jLabel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(splashUnderlineLabel, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(spashImageLabel, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addComponent(productNameLabel, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addGap(46, 46, 46)
                .addGroup(mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(productVersionLabel)
                    .addComponent(homePageLabel))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addGroup(mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(productVersionValueLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addGroup(mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
                        .addComponent(loadingLabel)
                        .addComponent(homePageValueLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
                .addContainerGap())
        );
        mainPanelLayout.setVerticalGroup(
            mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(mainPanelLayout.createSequentialGroup()
                .addGroup(mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(mainPanelLayout.createSequentialGroup()
                        .addGap(34, 34, 34)
                        .addGroup(mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(productVersionLabel)
                            .addComponent(productVersionValueLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                        .addGap(18, 18, 18)
                        .addGroup(mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(homePageLabel)
                            .addComponent(homePageValueLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
                    .addComponent(spashImageLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 144, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(splashUnderlineLabel)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(mainPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(loadingLabel)
                    .addComponent(productNameLabel))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addComponent(jLabel1))
        );
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(mainPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(mainPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
        );
        pack();
    }// </editor-fold>//GEN-END:initComponents

    /*
     * Opens the QuickFind home page (stored in the label's name property)
     * in a new browser window
     */
    private void homePageValueLabelMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_homePageValueLabelMouseClicked
        try {
            java.net.URI uri = new java.net.URI(homePageValueLabel.getName());
            Desktop.getDesktop().browse(uri);
        } catch (URISyntaxException URISyntaxEx) {
            LOGGER.log(Level.SEVERE, "Invalid URI passed", URISyntaxEx);
        } catch (IOException iOException) {
            LOGGER.log(Level.SEVERE, "Browser failed to launch", iOException);
        }
    }//GEN-LAST:event_homePageValueLabelMouseClicked

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JLabel homePageLabel;
    private javax.swing.JLabel homePageValueLabel;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel loadingLabel;
    private javax.swing.JPanel mainPanel;
    private javax.swing.JLabel productNameLabel;
    private javax.swing.JLabel productVersionLabel;
    private javax.swing.JLabel productVersionValueLabel;
    private javax.swing.JLabel spashImageLabel;
    private javax.swing.JLabel splashUnderlineLabel;
    // End of variables declaration//GEN-END:variables
}
| |
package com.xoppa.blog.libgdx.g3d.bullet.dynamics.step5;
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.PerspectiveCamera;
import com.badlogic.gdx.graphics.VertexAttributes.Usage;
import com.badlogic.gdx.graphics.g3d.Environment;
import com.badlogic.gdx.graphics.g3d.Material;
import com.badlogic.gdx.graphics.g3d.Model;
import com.badlogic.gdx.graphics.g3d.ModelBatch;
import com.badlogic.gdx.graphics.g3d.ModelInstance;
import com.badlogic.gdx.graphics.g3d.attributes.ColorAttribute;
import com.badlogic.gdx.graphics.g3d.environment.DirectionalLight;
import com.badlogic.gdx.graphics.g3d.utils.CameraInputController;
import com.badlogic.gdx.graphics.g3d.utils.ModelBuilder;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Matrix4;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.physics.bullet.Bullet;
import com.badlogic.gdx.physics.bullet.collision.ContactListener;
import com.badlogic.gdx.physics.bullet.collision.btBoxShape;
import com.badlogic.gdx.physics.bullet.collision.btBroadphaseInterface;
import com.badlogic.gdx.physics.bullet.collision.btCapsuleShape;
import com.badlogic.gdx.physics.bullet.collision.btCollisionConfiguration;
import com.badlogic.gdx.physics.bullet.collision.btCollisionDispatcher;
import com.badlogic.gdx.physics.bullet.collision.btCollisionObject;
import com.badlogic.gdx.physics.bullet.collision.btCollisionShape;
import com.badlogic.gdx.physics.bullet.collision.btConeShape;
import com.badlogic.gdx.physics.bullet.collision.btCylinderShape;
import com.badlogic.gdx.physics.bullet.collision.btDbvtBroadphase;
import com.badlogic.gdx.physics.bullet.collision.btDefaultCollisionConfiguration;
import com.badlogic.gdx.physics.bullet.collision.btDispatcher;
import com.badlogic.gdx.physics.bullet.collision.btSphereShape;
import com.badlogic.gdx.physics.bullet.dynamics.btConstraintSolver;
import com.badlogic.gdx.physics.bullet.dynamics.btDiscreteDynamicsWorld;
import com.badlogic.gdx.physics.bullet.dynamics.btDynamicsWorld;
import com.badlogic.gdx.physics.bullet.dynamics.btRigidBody;
import com.badlogic.gdx.physics.bullet.dynamics.btSequentialImpulseConstraintSolver;
import com.badlogic.gdx.physics.bullet.linearmath.btMotionState;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.ArrayMap;
import com.badlogic.gdx.utils.Disposable;
/** @see https://xoppa.github.io/blog/using-the-libgdx-3d-physics-bullet-wrapper-part2/
* @author Xoppa */
/**
 * libGDX Bullet dynamics demo: spawns random rigid bodies above a ground box
 * and flashes objects white on first contact with the ground, using contact
 * callback flags/filters. Disposal order of the native Bullet objects is
 * significant — the code is documented rather than restructured.
 */
public class BulletTest implements ApplicationListener {
	// contact-callback flag assigned to the ground body
	final static short GROUND_FLAG = 1 << 8;
	// contact-callback flag assigned to every spawned object
	final static short OBJECT_FLAG = 1 << 9;
	// all bits set — matches any callback flag
	final static short ALL_FLAG = -1;

	// Turns an object's diffuse color white when a filtered contact is added.
	// userValue* are the indices into `instances` set via setUserValue().
	class MyContactListener extends ContactListener {
		@Override
		public boolean onContactAdded (int userValue0, int partId0, int index0, boolean match0, int userValue1, int partId1,
			int index1, boolean match1) {
			if (match0)
				((ColorAttribute)instances.get(userValue0).materials.get(0).get(ColorAttribute.Diffuse)).color.set(Color.WHITE);
			if (match1)
				((ColorAttribute)instances.get(userValue1).materials.get(0).get(ColorAttribute.Diffuse)).color.set(Color.WHITE);
			return true;
		}
	}

	// Bridges Bullet's motion state to the ModelInstance transform, so the
	// render transform follows the simulated body automatically.
	static class MyMotionState extends btMotionState {
		Matrix4 transform;

		@Override
		public void getWorldTransform (Matrix4 worldTrans) {
			worldTrans.set(transform);
		}

		@Override
		public void setWorldTransform (Matrix4 worldTrans) {
			transform.set(worldTrans);
		}
	}

	// A renderable instance paired with its rigid body and motion state.
	static class GameObject extends ModelInstance implements Disposable {
		public final btRigidBody body;
		public final MyMotionState motionState;

		public GameObject (Model model, String node, btRigidBody.btRigidBodyConstructionInfo constructionInfo) {
			super(model, node);
			motionState = new MyMotionState();
			// share the instance's transform so physics updates move the model
			motionState.transform = transform;
			body = new btRigidBody(constructionInfo);
			body.setMotionState(motionState);
		}

		@Override
		public void dispose () {
			// native Bullet objects must be disposed explicitly
			body.dispose();
			motionState.dispose();
		}

		// Factory bundling model node, collision shape and construction info;
		// owns (and disposes) the shape and construction info.
		static class Constructor implements Disposable {
			public final Model model;
			public final String node;
			public final btCollisionShape shape;
			public final btRigidBody.btRigidBodyConstructionInfo constructionInfo;
			// scratch vector shared by all constructors (single-threaded use)
			private static Vector3 localInertia = new Vector3();

			public Constructor (Model model, String node, btCollisionShape shape, float mass) {
				this.model = model;
				this.node = node;
				this.shape = shape;
				// mass 0 => static body, no inertia
				if (mass > 0f)
					shape.calculateLocalInertia(mass, localInertia);
				else
					localInertia.set(0, 0, 0);
				this.constructionInfo = new btRigidBody.btRigidBodyConstructionInfo(mass, null, shape, localInertia);
			}

			public GameObject construct () {
				return new GameObject(model, node, constructionInfo);
			}

			@Override
			public void dispose () {
				shape.dispose();
				constructionInfo.dispose();
			}
		}
	}

	PerspectiveCamera cam;
	CameraInputController camController;
	ModelBatch modelBatch;
	Environment environment;
	Model model;
	Array<GameObject> instances;
	ArrayMap<String, GameObject.Constructor> constructors;
	float spawnTimer;    // counts down to the next spawn; starts at 0 so one spawns immediately
	btCollisionConfiguration collisionConfig;
	btDispatcher dispatcher;
	MyContactListener contactListener;
	btBroadphaseInterface broadphase;
	btDynamicsWorld dynamicsWorld;
	btConstraintSolver constraintSolver;

	@Override
	public void create () {
		// must run before any other Bullet call — loads the native library
		Bullet.init();
		modelBatch = new ModelBatch();
		environment = new Environment();
		environment.set(new ColorAttribute(ColorAttribute.AmbientLight, 0.4f, 0.4f, 0.4f, 1f));
		environment.add(new DirectionalLight().set(0.8f, 0.8f, 0.8f, -1f, -0.8f, -0.2f));
		cam = new PerspectiveCamera(67, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
		cam.position.set(3f, 7f, 10f);
		cam.lookAt(0, 4f, 0);
		cam.near = 1f;
		cam.far = 300f;
		cam.update();
		camController = new CameraInputController(cam);
		Gdx.input.setInputProcessor(camController);
		// one model with a named node per shape; GameObjects reference nodes by id
		ModelBuilder mb = new ModelBuilder();
		mb.begin();
		mb.node().id = "ground";
		mb.part("ground", GL20.GL_TRIANGLES, Usage.Position | Usage.Normal, new Material(ColorAttribute.createDiffuse(Color.RED)))
			.box(5f, 1f, 5f);
		mb.node().id = "sphere";
		mb.part("sphere", GL20.GL_TRIANGLES, Usage.Position | Usage.Normal, new Material(ColorAttribute.createDiffuse(Color.GREEN)))
			.sphere(1f, 1f, 1f, 10, 10);
		mb.node().id = "box";
		mb.part("box", GL20.GL_TRIANGLES, Usage.Position | Usage.Normal, new Material(ColorAttribute.createDiffuse(Color.BLUE)))
			.box(1f, 1f, 1f);
		mb.node().id = "cone";
		mb.part("cone", GL20.GL_TRIANGLES, Usage.Position | Usage.Normal, new Material(ColorAttribute.createDiffuse(Color.YELLOW)))
			.cone(1f, 2f, 1f, 10);
		mb.node().id = "capsule";
		mb.part("capsule", GL20.GL_TRIANGLES, Usage.Position | Usage.Normal, new Material(ColorAttribute.createDiffuse(Color.CYAN)))
			.capsule(0.5f, 2f, 10);
		mb.node().id = "cylinder";
		mb.part("cylinder", GL20.GL_TRIANGLES, Usage.Position | Usage.Normal,
			new Material(ColorAttribute.createDiffuse(Color.MAGENTA))).cylinder(1f, 2f, 1f, 10);
		model = mb.end();
		// collision shapes sized to match the visual model parts above
		constructors = new ArrayMap<String, GameObject.Constructor>(String.class, GameObject.Constructor.class);
		constructors.put("ground", new GameObject.Constructor(model, "ground", new btBoxShape(new Vector3(2.5f, 0.5f, 2.5f)), 0f));
		constructors.put("sphere", new GameObject.Constructor(model, "sphere", new btSphereShape(0.5f), 1f));
		constructors.put("box", new GameObject.Constructor(model, "box", new btBoxShape(new Vector3(0.5f, 0.5f, 0.5f)), 1f));
		constructors.put("cone", new GameObject.Constructor(model, "cone", new btConeShape(0.5f, 2f), 1f));
		constructors.put("capsule", new GameObject.Constructor(model, "capsule", new btCapsuleShape(.5f, 1f), 1f));
		constructors.put("cylinder", new GameObject.Constructor(model, "cylinder", new btCylinderShape(new Vector3(.5f, 1f, .5f)),
			1f));
		// standard Bullet discrete dynamics world setup
		collisionConfig = new btDefaultCollisionConfiguration();
		dispatcher = new btCollisionDispatcher(collisionConfig);
		broadphase = new btDbvtBroadphase();
		constraintSolver = new btSequentialImpulseConstraintSolver();
		dynamicsWorld = new btDiscreteDynamicsWorld(dispatcher, broadphase, constraintSolver, collisionConfig);
		dynamicsWorld.setGravity(new Vector3(0, -10f, 0));
		contactListener = new MyContactListener();
		instances = new Array<GameObject>();
		GameObject object = constructors.get("ground").construct();
		instances.add(object);
		dynamicsWorld.addRigidBody(object.body);
		// ground carries the GROUND_FLAG but filters nothing itself (filter 0):
		// the callback fires via the objects' filters instead
		object.body.setContactCallbackFlag(GROUND_FLAG);
		object.body.setContactCallbackFilter(0);
	}

	// Spawns one random non-ground object above the scene with a random
	// orientation, registers it with the world and wires contact callbacks.
	public void spawn () {
		// index 0 is the ground constructor, so pick from [1, size-1]
		GameObject obj = constructors.values[1 + MathUtils.random(constructors.size - 2)].construct();
		obj.transform.setFromEulerAngles(MathUtils.random(360f), MathUtils.random(360f), MathUtils.random(360f));
		obj.transform.trn(MathUtils.random(-2.5f, 2.5f), 9f, MathUtils.random(-2.5f, 2.5f));
		obj.body.proceedToTransform(obj.transform);
		// user value = index into `instances` (used by the contact listener)
		obj.body.setUserValue(instances.size);
		obj.body.setCollisionFlags(obj.body.getCollisionFlags() | btCollisionObject.CollisionFlags.CF_CUSTOM_MATERIAL_CALLBACK);
		instances.add(obj);
		dynamicsWorld.addRigidBody(obj.body);
		obj.body.setContactCallbackFlag(OBJECT_FLAG);
		// only contacts with the ground trigger onContactAdded for this body
		obj.body.setContactCallbackFilter(GROUND_FLAG);
	}

	@Override
	public void render () {
		// clamp the frame delta so long frames don't destabilize the simulation
		final float delta = Math.min(1f / 30f, Gdx.graphics.getDeltaTime());
		dynamicsWorld.stepSimulation(delta, 5, 1f / 60f);
		// spawn a new object every 1.5 seconds
		if ((spawnTimer -= delta) < 0) {
			spawn();
			spawnTimer = 1.5f;
		}
		camController.update();
		Gdx.gl.glClearColor(0.3f, 0.3f, 0.3f, 1.f);
		Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
		modelBatch.begin(cam);
		modelBatch.render(instances, environment);
		modelBatch.end();
	}

	@Override
	public void dispose () {
		// dispose bodies before the world/solver infrastructure they belong to
		for (GameObject obj : instances)
			obj.dispose();
		instances.clear();
		for (GameObject.Constructor ctor : constructors.values())
			ctor.dispose();
		constructors.clear();
		dynamicsWorld.dispose();
		constraintSolver.dispose();
		broadphase.dispose();
		dispatcher.dispose();
		collisionConfig.dispose();
		contactListener.dispose();
		modelBatch.dispose();
		model.dispose();
	}

	@Override
	public void pause () {
	}

	@Override
	public void resume () {
	}

	@Override
	public void resize (int width, int height) {
	}
}
| |
package us.koller.cameraroll.ui;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.ColorStateList;
import android.graphics.Color;
import android.graphics.drawable.Animatable;
import android.graphics.drawable.AnimatedVectorDrawable;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.RequiresApi;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v4.content.ContextCompat;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.transition.Fade;
import android.transition.Slide;
import android.transition.TransitionSet;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.view.WindowInsets;
import android.view.WindowManager;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import us.koller.cameraroll.R;
import us.koller.cameraroll.data.models.VirtualAlbum;
import us.koller.cameraroll.themes.Theme;
import us.koller.cameraroll.adapter.fileExplorer.RecyclerViewAdapter;
import us.koller.cameraroll.data.fileOperations.Copy;
import us.koller.cameraroll.data.fileOperations.Delete;
import us.koller.cameraroll.data.fileOperations.FileOperation;
import us.koller.cameraroll.data.fileOperations.Move;
import us.koller.cameraroll.data.models.File_POJO;
import us.koller.cameraroll.data.provider.FilesProvider;
import us.koller.cameraroll.data.provider.Provider;
import us.koller.cameraroll.data.models.StorageRoot;
import us.koller.cameraroll.ui.widget.SwipeBackCoordinatorLayout;
import us.koller.cameraroll.util.animators.ColorFade;
import us.koller.cameraroll.util.Util;
public class FileExplorerActivity extends ThemeableActivity
implements SwipeBackCoordinatorLayout.OnSwipeListener, RecyclerViewAdapter.Callback {
// Bundle/Intent keys used to save and restore explorer state
public static final String ROOTS = "ROOTS";
public static final String CURRENT_DIR = "CURRENT_DIR";
public static final String MODE = "MODE";
public static final String SELECTED_ITEMS = "SELECTED_ITEMS";
// display name (not a bundle key) — presumably the label of the roots
// pseudo-directory; TODO confirm against usages outside this view
public static final String STORAGE_ROOTS = "Storage Roots";
public static final String FILE_OPERATION = "FILE_OPERATION";

// pseudo-entry holding the storage roots (restored from ROOTS)
private File_POJO roots;
// directory currently shown in the list (restored from CURRENT_DIR)
private File_POJO currentDir;
private FilesProvider filesProvider;
private RecyclerView recyclerView;
private RecyclerViewAdapter recyclerViewAdapter;
private Menu menu;
private Intent fileOpIntent;

// Callback the adapter uses to navigate into a directory.
public interface OnDirectoryChangeCallback {
    void changeDir(String path);
}
/**
 * Sets up the file explorer UI: transitions, toolbar, recycler view, FAB,
 * window insets, and restores saved state (current directory, selector or
 * pick-target mode) or loads the storage roots on a fresh start.
 *
 * Fix: in the pre-KitKat-Watch inset fallback the FAB translations used the
 * right inset for Y and the bottom inset for X (indices swapped relative to
 * the modern OnApplyWindowInsetsListener branch); they now match.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_file_explorer);
    // placeholder until state is restored or roots are loaded
    currentDir = new File_POJO("", false);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        getWindow().setEnterTransition(new TransitionSet()
                .setOrdering(TransitionSet.ORDERING_TOGETHER)
                .addTransition(new Slide(Gravity.BOTTOM))
                .addTransition(new Fade())
                .setInterpolator(new AccelerateDecelerateInterpolator()));
        getWindow().setReturnTransition(new TransitionSet()
                .setOrdering(TransitionSet.ORDERING_TOGETHER)
                .addTransition(new Slide(Gravity.BOTTOM))
                .addTransition(new Fade())
                .setInterpolator(new AccelerateDecelerateInterpolator()));
    }
    final Toolbar toolbar = findViewById(R.id.toolbar);
    toolbar.setBackgroundColor(toolbarColor);
    toolbar.setTitleTextColor(textColorPrimary);
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        AnimatedVectorDrawable drawable = (AnimatedVectorDrawable)
                ContextCompat.getDrawable(FileExplorerActivity.this, R.drawable.back_to_cancel_avd);
        //mutating avd to reset it
        drawable.mutate();
        toolbar.setNavigationIcon(drawable);
    } else {
        toolbar.setNavigationIcon(R.drawable.ic_arrow_back_white_24dp);
    }
    Drawable navIcon = toolbar.getNavigationIcon();
    if (navIcon != null) {
        navIcon = DrawableCompat.wrap(navIcon);
        DrawableCompat.setTint(navIcon.mutate(), textColorSecondary);
        toolbar.setNavigationIcon(navIcon);
    }
    setSupportActionBar(toolbar);
    ActionBar actionBar = getSupportActionBar();
    if (actionBar != null) {
        actionBar.setTitle(getString(R.string.file_explorer));
        actionBar.setDisplayHomeAsUpEnabled(true);
    }
    Util.colorToolbarOverflowMenuIcon(toolbar, textColorSecondary);
    //need to be called after setTitle(), to ensure, that mTitleTextView exists
    final TextView titleTextView = Util.setToolbarTypeface(toolbar
    );
    if (titleTextView != null) {
        titleTextView.setEllipsize(TextUtils.TruncateAt.START);
    }
    final ViewGroup rootView = findViewById(R.id.swipeBackView);
    if (rootView instanceof SwipeBackCoordinatorLayout) {
        ((SwipeBackCoordinatorLayout) rootView).setOnSwipeListener(this);
    }
    recyclerView = findViewById(R.id.recyclerView);
    recyclerView.setLayoutManager(new LinearLayoutManager(this));
    recyclerViewAdapter = new RecyclerViewAdapter(
            new OnDirectoryChangeCallback() {
                @Override
                public void changeDir(String path) {
                    loadDirectory(path);
                }
            }, this);
    if (savedInstanceState != null && savedInstanceState.containsKey(CURRENT_DIR)) {
        // currentDir is still the empty placeholder here; the real saved
        // directory is applied further below after unparceling
        recyclerViewAdapter.setFiles(currentDir);
    }
    recyclerViewAdapter.notifyDataSetChanged();
    recyclerView.setAdapter(recyclerViewAdapter);
    //setup fab
    final FloatingActionButton fab = findViewById(R.id.fab);
    fab.setImageResource(R.drawable.ic_create_new_folder_white_24dp);
    Drawable d = fab.getDrawable();
    d = DrawableCompat.wrap(d);
    DrawableCompat.setTint(d.mutate(), accentTextColor);
    fab.setImageDrawable(d);
    // hidden initially; scaled in later when it should appear
    fab.setScaleX(0.0f);
    fab.setScaleY(0.0f);
    //setting window insets manually
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH) {
        rootView.setOnApplyWindowInsetsListener(new View.OnApplyWindowInsetsListener() {
            @RequiresApi(api = Build.VERSION_CODES.KITKAT_WATCH)
            @Override
            public WindowInsets onApplyWindowInsets(View view, WindowInsets insets) {
                toolbar.setPadding(toolbar.getPaddingStart() /*+ insets.getSystemWindowInsetLeft()*/,
                        toolbar.getPaddingTop() + insets.getSystemWindowInsetTop(),
                        toolbar.getPaddingEnd() /*+ insets.getSystemWindowInsetRight()*/,
                        toolbar.getPaddingBottom());
                ViewGroup.MarginLayoutParams toolbarParams
                        = (ViewGroup.MarginLayoutParams) toolbar.getLayoutParams();
                toolbarParams.leftMargin += insets.getSystemWindowInsetLeft();
                toolbarParams.rightMargin += insets.getSystemWindowInsetRight();
                toolbar.setLayoutParams(toolbarParams);
                recyclerView.setPadding(recyclerView.getPaddingStart() + insets.getSystemWindowInsetLeft(),
                        recyclerView.getPaddingTop() + insets.getSystemWindowInsetTop(),
                        recyclerView.getPaddingEnd() + insets.getSystemWindowInsetRight(),
                        recyclerView.getPaddingBottom() + insets.getSystemWindowInsetBottom());
                fab.setTranslationY(-insets.getSystemWindowInsetBottom());
                fab.setTranslationX(-insets.getSystemWindowInsetRight());
                // clear this listener so insets aren't re-applied
                rootView.setOnApplyWindowInsetsListener(null);
                return insets.consumeSystemWindowInsets();
            }
        });
    } else {
        rootView.getViewTreeObserver()
                .addOnGlobalLayoutListener(
                        new ViewTreeObserver.OnGlobalLayoutListener() {
                            @Override
                            public void onGlobalLayout() {
                                // hacky way of getting window insets on pre-Lollipop
                                // somewhat works...
                                int[] screenSize = Util.getScreenSize(FileExplorerActivity.this);
                                // windowInsets = [left, top, right, bottom]
                                int[] windowInsets = new int[]{
                                        Math.abs(screenSize[0] - rootView.getLeft()),
                                        Math.abs(screenSize[1] - rootView.getTop()),
                                        Math.abs(screenSize[2] - rootView.getRight()),
                                        Math.abs(screenSize[3] - rootView.getBottom())};
                                toolbar.setPadding(toolbar.getPaddingStart(),
                                        toolbar.getPaddingTop() + windowInsets[1],
                                        toolbar.getPaddingEnd(),
                                        toolbar.getPaddingBottom());
                                ViewGroup.MarginLayoutParams toolbarParams
                                        = (ViewGroup.MarginLayoutParams) toolbar.getLayoutParams();
                                toolbarParams.leftMargin += windowInsets[0];
                                toolbarParams.rightMargin += windowInsets[2];
                                toolbar.setLayoutParams(toolbarParams);
                                recyclerView.setPadding(recyclerView.getPaddingStart() + windowInsets[0],
                                        recyclerView.getPaddingTop() + windowInsets[1],
                                        recyclerView.getPaddingEnd() + windowInsets[2],
                                        recyclerView.getPaddingBottom() + windowInsets[3]);
                                // fixed: Y must offset by the bottom inset [3] and X by the
                                // right inset [2] (indices were swapped), matching the
                                // KITKAT_WATCH+ branch above
                                fab.setTranslationY(-windowInsets[3]);
                                fab.setTranslationX(-windowInsets[2]);
                                rootView.getViewTreeObserver().removeOnGlobalLayoutListener(this);
                            }
                        });
    }
    //needed to achieve transparent navBar
    setSystemUiFlags();
    //load files
    if (savedInstanceState != null
            && savedInstanceState.containsKey(CURRENT_DIR)
            && savedInstanceState.containsKey(ROOTS)) {
        roots = savedInstanceState.getParcelable(ROOTS);
        currentDir = savedInstanceState.getParcelable(CURRENT_DIR);
        recyclerViewAdapter.setFiles(currentDir);
        recyclerViewAdapter.notifyDataSetChanged();
        onDataChanged();
        if (savedInstanceState.containsKey(MODE)) {
            int mode = savedInstanceState.getInt(MODE);
            if (mode == RecyclerViewAdapter.SELECTOR_MODE) {
                if (savedInstanceState.containsKey(SELECTED_ITEMS)) {
                    final File_POJO[] selectedItems
                            = (File_POJO[]) savedInstanceState.getParcelableArray(SELECTED_ITEMS);
                    if (selectedItems != null) {
                        // re-enter selector mode only after layout, so the
                        // adapter's views exist
                        rootView.getViewTreeObserver().addOnGlobalLayoutListener(
                                new ViewTreeObserver.OnGlobalLayoutListener() {
                                    @Override
                                    public void onGlobalLayout() {
                                        rootView.getViewTreeObserver().removeOnGlobalLayoutListener(this);
                                        recyclerViewAdapter.enterSelectorMode(selectedItems);
                                    }
                                });
                    }
                }
            } else if (mode == RecyclerViewAdapter.PICK_TARGET_MODE
                    && savedInstanceState.containsKey(FILE_OPERATION)) {
                onSelectorModeEnter();
                //fileOp = savedInstanceState.getParcelable(FILE_OPERATION);
                /*FileOperation.operation = fileOp != null ?
                        fileOp.getType() : FileOperation.EMPTY;*/
                //need to call pick target after onSelectorModeEnter animation are done
                new Handler().postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        recyclerViewAdapter.pickTarget();
                    }
                }, (int) (500 * Util.getAnimatorSpeed(this)));
            }
        }
    } else {
        loadRoots();
        //show warning dialog
        /*new AlertDialog.Builder(this, getDialogThemeRes())
                .setTitle(R.string.warning)
                .setMessage(Html.fromHtml(getString(R.string.file_explorer_warning_message)))
                .setPositiveButton(R.string.ok, null)
                .setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialogInterface, int i) {
                        finish();
                    }
                })
                .show();*/
    }
}
/**
 * Builds the virtual root node (path {@code STORAGE_ROOTS}) from every
 * storage root reported by {@link FilesProvider} and makes it the current
 * directory. Refreshes the adapter if it has already been created.
 */
public void loadRoots() {
    StorageRoot[] storageRoots = FilesProvider.getRoots(this);
    StorageRoot rootNode = new StorageRoot(STORAGE_ROOTS);
    // attach every detected storage root as a child of the virtual node
    for (StorageRoot storageRoot : storageRoots) {
        rootNode.addChild(storageRoot);
    }
    roots = rootNode;
    currentDir = roots;
    if (recyclerViewAdapter != null) {
        recyclerViewAdapter.setFiles(currentDir);
        recyclerViewAdapter.notifyDataSetChanged();
        onDataChanged();
    }
}
/**
 * Loads the directory at the given path asynchronously, showing an
 * indefinite "loading" snackbar while the FilesProvider works. On success
 * the adapter is refreshed; on timeout a retry snackbar is shown; if a
 * permission is needed the loading snackbar is simply dismissed.
 *
 * @param path absolute path of the directory to load
 */
public void loadDirectory(final String path) {
Log.d("FileExplorerActivity", "loadDirectory(): " + path);
final Snackbar snackbar = Snackbar.make(findViewById(R.id.root_view),
getString(R.string.loading), Snackbar.LENGTH_INDEFINITE);
Util.showSnackbar(snackbar);
final FilesProvider.Callback callback = new FilesProvider.Callback() {
@Override
public void onDirLoaded(final File_POJO dir) {
runOnUiThread(new Runnable() {
@Override
public void run() {
// release the provider; a fresh one is created for each load
filesProvider.onDestroy();
filesProvider = null;
if (dir != null) {
FileExplorerActivity.this.currentDir = dir;
if (recyclerViewAdapter != null) {
recyclerViewAdapter.setFiles(currentDir);
recyclerViewAdapter.notifyDataSetChanged();
onDataChanged();
}
}
snackbar.dismiss();
}
});
}
@Override
public void timeout() {
runOnUiThread(new Runnable() {
@Override
public void run() {
snackbar.dismiss();
// local variable intentionally shadows the outer snackbar:
// this one carries a retry action for the same path
final Snackbar snackbar = Snackbar.make(findViewById(R.id.root_view),
R.string.loading_failed, Snackbar.LENGTH_INDEFINITE);
snackbar.setAction(getString(R.string.retry), new View.OnClickListener() {
@Override
public void onClick(View view) {
loadDirectory(path);
}
});
Util.showSnackbar(snackbar);
}
});
}
@Override
public void needPermission() {
runOnUiThread(new Runnable() {
@Override
public void run() {
// just hide the loading indicator; the permission flow is handled elsewhere
snackbar.dismiss();
}
});
}
};
filesProvider = new FilesProvider(this);
filesProvider.loadDir(this, path, callback);
}
/**
 * Persists navigation state — the roots tree, the current directory, the
 * adapter mode and any current selection — so it survives recreation.
 */
@Override
protected void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    outState.putParcelable(ROOTS, roots);
    if (currentDir != null) {
        outState.putParcelable(CURRENT_DIR, currentDir);
    }
    outState.putInt(MODE, recyclerViewAdapter.getMode());
    File_POJO[] selection = recyclerViewAdapter.getSelectedItems();
    if (selection.length > 0) {
        outState.putParcelableArray(SELECTED_ITEMS, selection);
    }
}
/**
 * Inflates the file-explorer menu, caches it in {@code this.menu}, hides
 * the context-dependent items and tints the paste icon with the accent
 * text color.
 */
@Override
public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.file_explorer, menu);
    this.menu = menu;
    // items are made visible when a folder gets selected
    manageMenuItems();
    MenuItem pasteItem = menu.findItem(R.id.paste);
    Drawable pasteIcon = DrawableCompat.wrap(pasteItem.getIcon().mutate());
    DrawableCompat.setTint(pasteIcon.mutate(), accentTextColor);
    pasteItem.setIcon(pasteIcon);
    return super.onCreateOptionsMenu(menu);
}
/**
 * Updates visibility/checked/enabled state of the options-menu items for
 * the currently shown directory. Safe to call before any directory has
 * been loaded ({@code currentDir == null}) and before the menu exists.
 */
public void manageMenuItems() {
    if (menu == null) {
        return;
    }
    // true while a real directory (not the virtual roots node) is shown
    boolean inRealDir = currentDir != null
            && !currentDir.getPath().equals(STORAGE_ROOTS);
    for (int i = 0; i < menu.size(); i++) {
        MenuItem item = menu.getItem(i);
        switch (item.getItemId()) {
            case R.id.exclude:
                if (currentDir != null) {
                    item.setVisible(inRealDir);
                    if (Provider.isPathPermanentlyExcluded(currentDir.getPath())) {
                        // permanently excluded dirs cannot be toggled
                        item.setChecked(true);
                        item.setEnabled(false);
                    } else {
                        item.setChecked(currentDir.excluded);
                        // disabled when an ancestor dir is already excluded
                        item.setEnabled(inRealDir
                                && !Provider.isDirExcludedBecauseParentDirIsExcluded(
                                currentDir.getPath(), Provider.getExcludedPaths()));
                    }
                } else {
                    item.setVisible(true);
                    item.setChecked(false);
                    item.setEnabled(false);
                }
                break;
            case R.id.scan:
            case R.id.add_to_virtual_album:
                // fix: original dereferenced currentDir without the null
                // check the exclude case has -> NPE before first load
                item.setVisible(inRealDir);
                break;
            default:
                item.setVisible(false);
                break;
        }
    }
}
/**
 * Handles toolbar actions: up-navigation / mode cancel, exclusion toggle,
 * media scan, virtual-album add, paste-target confirmation and starting
 * the copy/move/delete selection flows.
 */
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
// while a selection/pick mode is active the home arrow acts as "cancel"
if (recyclerViewAdapter.isModeActive()
|| recyclerViewAdapter.getMode()
== RecyclerViewAdapter.PICK_TARGET_MODE) {
recyclerViewAdapter.cancelMode();
} else {
onBackPressed();
}
break;
case R.id.exclude:
// toggle exclusion of the current dir from the media index
currentDir.excluded = !currentDir.excluded;
item.setChecked(currentDir.excluded);
if (currentDir.excluded) {
FilesProvider.addExcludedPath(this, currentDir.getPath());
} else {
FilesProvider.removeExcludedPath(this, currentDir.getPath());
}
break;
case R.id.scan:
// trigger a media scan for the dir and all of its children
ArrayList<String> paths = FileOperation.Util
.getAllChildPaths(new ArrayList<String>(), currentDir.getPath());
String[] pathsArray = new String[paths.size()];
paths.toArray(pathsArray);
FileOperation.Util.scanPathsWithToast(this, pathsArray);
break;
case R.id.add_to_virtual_album:
String path = currentDir.getPath();
AlertDialog dialog = VirtualAlbum.Util.getAddToVirtualAlbumDialog(this, path);
dialog.show();
break;
case R.id.paste:
// paste is only valid inside a real directory, not the roots overview
if (!currentDir.getPath().equals(STORAGE_ROOTS)) {
recyclerViewAdapter.cancelMode();
if (fileOpIntent != null) {
File_POJO target = recyclerViewAdapter.getFiles();
fileOpIntent.putExtra(FileOperation.TARGET, target);
startService(fileOpIntent);
fileOpIntent = null;
}
} else {
Toast.makeText(this, R.string.paste_error, Toast.LENGTH_SHORT).show();
}
break;
case R.id.copy:
// remember the pending operation; files are attached on selector-mode exit
fileOpIntent = new Intent(this, Copy.class)
.setAction(FileOperation.Util.getActionString(this, FileOperation.COPY));
recyclerViewAdapter.cancelMode();
break;
case R.id.move:
fileOpIntent = new Intent(this, Move.class)
.setAction(FileOperation.Util.getActionString(this, FileOperation.MOVE));
recyclerViewAdapter.cancelMode();
break;
case R.id.delete:
fileOpIntent = new Intent(this, Delete.class)
.setAction(FileOperation.Util.getActionString(this, FileOperation.DELETE));
recyclerViewAdapter.cancelMode();
break;
default:
break;
}
return super.onOptionsItemSelected(item);
}
/**
 * Handles a FAB click: hides the FAB and shows a "new folder" input
 * dialog. Confirming starts a {@code NEW_DIR} file-operation service for
 * {@code currentDir}; dismissing the dialog re-shows the FAB.
 *
 * @param v the clicked view (unused)
 */
public void fabClicked(View v) {
    animateFab(false);
    View dialogLayout = LayoutInflater.from(this).inflate(R.layout.input_dialog_layout,
            (ViewGroup) findViewById(R.id.root_view), false);
    final EditText editText = dialogLayout.findViewById(R.id.edit_text);
    AlertDialog dialog = new AlertDialog.Builder(this, theme.getDialogThemeRes())
            .setTitle(R.string.new_folder)
            .setView(dialogLayout)
            .setPositiveButton(R.string.create, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialogInterface, int i) {
                    String filename = editText.getText().toString().trim();
                    // fix: ignore blank names; the original would request
                    // creation of "<currentDir>/" for empty input
                    if (filename.isEmpty()) {
                        return;
                    }
                    File_POJO newFolder = new File_POJO(currentDir.getPath()
                            + "/" + filename, false);
                    File_POJO[] files = new File_POJO[]{newFolder};
                    Intent intent = FileOperation.getDefaultIntent(
                            FileExplorerActivity.this,
                            FileOperation.NEW_DIR,
                            files);
                    startService(intent);
                }
            })
            .setNegativeButton(getString(R.string.cancel), null)
            .setOnDismissListener(new DialogInterface.OnDismissListener() {
                @Override
                public void onDismiss(DialogInterface dialogInterface) {
                    // re-show the FAB once the dialog goes away
                    animateFab(true);
                }
            })
            .create();
    //noinspection ConstantConditions
    dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_VISIBLE);
    dialog.show();
}
/**
 * Scales and fades the FAB in or out. No-ops when the FAB is already in
 * the requested end state; the click listener is only attached while the
 * FAB is being shown.
 *
 * @param show true to reveal the FAB, false to hide it
 */
public void animateFab(final boolean show) {
    final FloatingActionButton fab = findViewById(R.id.fab);
    float targetScale = show ? 1.0f : 0.0f;
    // already at the requested scale -> nothing to animate
    if (fab.getScaleX() == targetScale) {
        return;
    }
    View.OnClickListener listener = null;
    if (show) {
        listener = new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                fabClicked(view);
            }
        };
    }
    fab.setOnClickListener(listener);
    fab.animate()
            .scaleX(targetScale)
            .scaleY(targetScale)
            .alpha(targetScale)
            .setDuration(250)
            .start();
}
/**
 * Back navigation: first cancels any active selection mode, then walks up
 * the directory tree, showing the roots overview once a storage root is
 * reached, and only finishes the activity from the roots overview.
 */
@Override
public void onBackPressed() {
    if (recyclerViewAdapter.isModeActive()) {
        // first back press only cancels selector/pick mode
        recyclerViewAdapter.cancelMode();
    } else if (currentDir != null && !currentDir.getPath().equals(STORAGE_ROOTS)) {
        String path = currentDir.getPath();
        int index = path.lastIndexOf("/");
        // fix: guard index <= 0 — the original substring(0, index) threw
        // for a path without '/' and produced "" for a top-level path
        if (!isCurrentFileARoot() && index > 0) {
            loadDirectory(path.substring(0, index));
        } else {
            // at (or directly below) a storage root: show the roots overview
            loadRoots();
        }
    } else {
        super.onBackPressed();
    }
}
/**
 * @return true when the current directory is the virtual roots node or
 *         its path matches one of the storage roots' paths
 */
private boolean isCurrentFileARoot() {
    if (currentDir == null) {
        return false;
    }
    String path = currentDir.getPath();
    if (path.equals(STORAGE_ROOTS)) {
        return true;
    }
    for (int i = 0; i < roots.getChildren().size(); i++) {
        if (path.equals(roots.getChildren().get(i).getPath())) {
            return true;
        }
    }
    return false;
}
@Override
protected void onDestroy() {
super.onDestroy();
// persist the excluded-paths list before the activity goes away
Provider.saveExcludedPaths(this);
// stop any still-running directory load
if (filesProvider != null) {
filesProvider.onDestroy();
}
}
/**
 * Swipe-back is only allowed when the recycler view cannot scroll any
 * further in the swipe direction.
 */
@Override
public boolean canSwipeBack(int dir) {
return SwipeBackCoordinatorLayout.canSwipeBackForThisView(recyclerView, dir);
}
/**
 * Called while the swipe-back gesture is in progress: dims the window
 * background with the gesture progress and, in selector mode on themes
 * with light status-bar icons, flips the icon color once the layout has
 * been dragged past half the status-bar height.
 *
 * @param percent swipe progress in [0, 1]
 */
@Override
public void onSwipeProcess(float percent) {
    getWindow().getDecorView().setBackgroundColor(
            SwipeBackCoordinatorLayout.getBackgroundColor(percent));
    RecyclerViewAdapter adapter = (RecyclerViewAdapter) recyclerView.getAdapter();
    if (theme.darkStatusBarIcons() || !adapter.isModeActive()) {
        return;
    }
    SwipeBackCoordinatorLayout layout = findViewById(R.id.swipeBackView);
    Toolbar toolbar = findViewById(R.id.toolbar);
    View rootView = findViewById(R.id.root_view);
    int translationY = (int) layout.getTranslationY();
    // toolbar top padding equals the status-bar inset here
    int statusBarHeight = toolbar.getPaddingTop();
    if (translationY > statusBarHeight * 0.5) {
        Util.setLightStatusBarIcons(rootView);
    } else {
        Util.setDarkStatusBarIcons(rootView);
    }
}
/**
 * Called when the swipe-back gesture completes: installs a slide+fade
 * return transition (Lollipop+) matching the swipe direction, then
 * finishes the activity.
 *
 * @param dir positive for a downward swipe (slide out the top), otherwise
 *            the bottom edge is used
 */
@Override
public void onSwipeFinish(int dir) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        int slideEdge = dir > 0 ? Gravity.TOP : Gravity.BOTTOM;
        TransitionSet returnTransition = new TransitionSet()
                .setOrdering(TransitionSet.ORDERING_TOGETHER)
                .addTransition(new Slide(slideEdge))
                .addTransition(new Fade())
                .setInterpolator(new AccelerateDecelerateInterpolator());
        getWindow().setReturnTransition(returnTransition);
    }
    finish();
}
/**
 * Animates the toolbar into "selector mode": accent background, cancel
 * navigation icon, and only the copy/move/delete menu items visible.
 */
@Override
public void onSelectorModeEnter() {
// a new selection discards any pending file operation
fileOpIntent = null;
final Toolbar toolbar = findViewById(R.id.toolbar);
toolbar.setActivated(true);
toolbar.animate().translationY(0.0f).start();
if (theme.darkStatusBarIconsInSelectorMode()) {
Util.setDarkStatusBarIcons(findViewById(R.id.root_view));
} else {
Util.setLightStatusBarIcons(findViewById(R.id.root_view));
}
ColorDrawable statusBarOverlay = getStatusBarOverlay();
if (statusBarOverlay != null) {
ColorFade.fadeDrawableAlpha(statusBarOverlay, 0);
}
ColorFade.fadeBackgroundColor(toolbar, toolbarColor, accentColor);
ColorFade.fadeToolbarTitleColor(toolbar, accentTextColor, null);
//fade overflow menu icon
ColorFade.fadeDrawableColor(toolbar.getOverflowIcon(),
textColorSecondary, accentTextColor);
Drawable navIcon = toolbar.getNavigationIcon();
if (navIcon instanceof Animatable) {
// play the animated nav-icon drawable while fading its color
((Animatable) navIcon).start();
ColorFade.fadeDrawableColor(navIcon,
textColorSecondary, accentTextColor);
}
// after the icon animation ends, swap in a static cancel icon and
// show only the selection-related menu items
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
Drawable d;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
AnimatedVectorDrawable drawable = (AnimatedVectorDrawable)
ContextCompat.getDrawable(FileExplorerActivity.this,
R.drawable.cancel_to_back_avd);
//mutating avd to reset it
drawable.mutate();
d = drawable;
} else {
d = ContextCompat.getDrawable(FileExplorerActivity.this,
R.drawable.ic_clear_black_24dp);
}
d = DrawableCompat.wrap(d);
DrawableCompat.setTint(d.mutate(), accentTextColor);
toolbar.setNavigationIcon(d);
//make menu items visible
for (int i = 0; i < menu.size(); i++) {
MenuItem item = menu.getItem(i);
switch (item.getItemId()) {
case R.id.copy:
case R.id.move:
case R.id.delete:
item.setVisible(true);
break;
default:
item.setVisible(false);
break;
}
}
}
}, navIcon instanceof Animatable ? (int) (500 * Util.getAnimatorSpeed(this)) : 0);
}
/**
 * Called when selection mode ends with the selected files. Attaches them
 * to the pending operation intent: delete shows a confirmation dialog,
 * copy/move proceed to target picking; with no pending operation the
 * toolbar is simply reset.
 *
 * @param selected_items files that were selected when the mode ended
 */
@Override
public void onSelectorModeExit(final File_POJO[] selected_items) {
if (fileOpIntent != null) {
fileOpIntent.putExtra(FileOperation.FILES, selected_items);
switch (FileOperation.Util.getActionInt(this, fileOpIntent.getAction())) {
case FileOperation.DELETE:
resetToolbar();
String title;
int count = selected_items.length;
if (count == 1) {
title = getString(R.string.delete_file, count);
} else {
title = getString(R.string.delete_files, count);
}
new AlertDialog.Builder(this, theme.getDialogThemeRes())
.setTitle(title)
.setNegativeButton(getString(R.string.no), null)
.setPositiveButton(getString(R.string.delete), new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
// only start the delete service after explicit confirmation
startService(fileOpIntent);
fileOpIntent = null;
}
})
.create().show();
break;
case FileOperation.COPY:
case FileOperation.MOVE:
// copy/move still need a destination -> switch to pick-target mode
recyclerViewAdapter.pickTarget();
break;
default:
break;
}
}
if (fileOpIntent == null) {
resetToolbar();
}
}
/**
 * Updates the toolbar title with the number of selected items; the
 * transient zero-count callback is ignored.
 *
 * @param count number of currently selected items
 */
@Override
public void onItemSelected(int count) {
    if (count == 0) {
        return;
    }
    final String title = getString(R.string.selected_count, count);
    Toolbar toolbar = findViewById(R.id.toolbar);
    ColorFade.fadeToolbarTitleColor(toolbar, accentTextColor,
            new ColorFade.ToolbarTitleFadeCallback() {
                @Override
                public void setTitle(Toolbar toolbar) {
                    toolbar.setTitle(title);
                }
            });
}
/**
 * Animates the toolbar into "pick target" mode: the title shows how many
 * files are being copied/moved, the FAB (new folder) is revealed, and
 * only the paste menu item stays visible.
 */
@Override
public void onPickTargetModeEnter() {
final Toolbar toolbar = findViewById(R.id.toolbar);
if (fileOpIntent != null) {
final int count = FileOperation.getFiles(fileOpIntent).length;
ColorFade.fadeToolbarTitleColor(toolbar, accentTextColor,
new ColorFade.ToolbarTitleFadeCallback() {
@Override
public void setTitle(Toolbar toolbar) {
String title = "";
int action = FileOperation.Util.getActionInt(
FileExplorerActivity.this, fileOpIntent.getAction());
switch (action) {
case FileOperation.COPY:
if (count == 1) {
title = getString(R.string.copy_file, count);
} else {
title = getString(R.string.copy_files, count);
}
break;
case FileOperation.MOVE:
if (count == 1) {
title = getString(R.string.move_file, count);
} else {
title = getString(R.string.move_files, count);
}
break;
}
toolbar.setTitle(title);
}
});
}
animateFab(true);
// wait for the mode-transition animations before swapping menu items
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
//hide menu items
for (int i = 0; i < menu.size(); i++) {
MenuItem item = menu.getItem(i);
switch (item.getItemId()) {
case R.id.paste:
item.setVisible(true);
break;
default:
item.setVisible(false);
break;
}
}
}
}, (int) (300 * Util.getAnimatorSpeed(this)));
}
@Override
public void onPickTargetModeExit() {
// hide the new-folder FAB again and restore the default toolbar look
animateFab(false);
resetToolbar();
}
/**
 * Refreshes UI bound to the current directory: toggles the empty-state
 * text and, while no selection/pick mode is active, fades the toolbar
 * title to the new path and re-evaluates the menu items.
 */
@Override
public void onDataChanged() {
    final View emptyState = findViewById(R.id.empty_state_text);
    boolean empty = currentDir.getChildren().size() == 0;
    emptyState.animate()
            .alpha(empty ? 1.0f : 0.0f)
            .setListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    super.onAnimationEnd(animation);
                    // re-check at animation end: the data may have changed
                    emptyState.setVisibility(
                            currentDir.getChildren().size() == 0 ?
                                    View.VISIBLE : View.GONE);
                }
            })
            .setDuration(100)
            .start();
    // fix: the original tested NORMAL_MODE twice in two separate,
    // identical if-blocks; merged into one
    if (recyclerViewAdapter.getMode() == RecyclerViewAdapter.NORMAL_MODE) {
        final Toolbar toolbar = findViewById(R.id.toolbar);
        ColorFade.fadeToolbarTitleColor(toolbar, textColorPrimary,
                new ColorFade.ToolbarTitleFadeCallback() {
                    @Override
                    public void setTitle(Toolbar toolbar) {
                        toolbar.setTitle(currentDir.getPath());
                    }
                });
        manageMenuItems();
    }
}
/**
 * Restores the toolbar to its normal appearance after selector or
 * pick-target mode: default colors, current path as title, back-arrow
 * navigation icon, and only the exclude/scan menu items visible.
 */
public void resetToolbar() {
final Toolbar toolbar = findViewById(R.id.toolbar);
if (theme.darkStatusBarIcons()) {
Util.setDarkStatusBarIcons(findViewById(R.id.root_view));
} else {
Util.setLightStatusBarIcons(findViewById(R.id.root_view));
}
ColorDrawable statusBarOverlay = getStatusBarOverlay();
if (statusBarOverlay != null) {
// restore the overlay alpha matching the themed status-bar color
int alpha = Color.alpha(getStatusBarColor());
ColorFade.fadeDrawableAlpha(statusBarOverlay, alpha);
}
toolbar.setActivated(theme.elevatedToolbar());
ColorFade.fadeBackgroundColor(toolbar, accentColor, toolbarColor);
ColorFade.fadeToolbarTitleColor(toolbar, textColorPrimary,
new ColorFade.ToolbarTitleFadeCallback() {
@Override
public void setTitle(Toolbar toolbar) {
toolbar.setTitle(currentDir.getPath());
}
});
//fade overflow menu icon
ColorFade.fadeDrawableColor(toolbar.getOverflowIcon(), accentTextColor, textColorSecondary);
Drawable navIcon = toolbar.getNavigationIcon();
if (navIcon instanceof Animatable) {
// play the animated nav-icon drawable while fading its color back
((Animatable) navIcon).start();
ColorFade.fadeDrawableColor(navIcon, accentTextColor, textColorSecondary);
}
// after the icon animation ends, swap in a static back arrow and
// restore the default menu items
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
Drawable d;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
AnimatedVectorDrawable drawable = (AnimatedVectorDrawable)
ContextCompat.getDrawable(FileExplorerActivity.this,
R.drawable.back_to_cancel_avd);
//mutating avd to reset it
drawable.mutate();
d = drawable;
} else {
d = ContextCompat.getDrawable(FileExplorerActivity.this,
R.drawable.ic_arrow_back_white_24dp);
}
d = DrawableCompat.wrap(d);
DrawableCompat.setTint(d.mutate(), textColorSecondary);
toolbar.setNavigationIcon(d);
//hide menu items
for (int i = 0; i < menu.size(); i++) {
MenuItem item = menu.getItem(i);
switch (item.getItemId()) {
case R.id.exclude:
case R.id.scan:
item.setVisible(true);
break;
default:
item.setVisible(false);
break;
}
}
}
}, navIcon instanceof Animatable ? (int) (500 * Util.getAnimatorSpeed(this)) : 0);
}
// translucent file-explorer variant of the dark theme
@Override
public int getDarkThemeRes() {
return R.style.CameraRoll_Theme_Translucent_FileExplorer;
}
// translucent file-explorer variant of the light theme
@Override
public int getLightThemeRes() {
return R.style.CameraRoll_Theme_Light_Translucent_FileExplorer;
}
/**
 * Applies theme-dependent styling: tints the FAB with the accent color,
 * sets the status-bar icon darkness, the toolbar elevation state and an
 * optional status-bar overlay.
 */
@Override
public void onThemeApplied(Theme theme) {
    FloatingActionButton fab = findViewById(R.id.fab);
    fab.setBackgroundTintList(ColorStateList.valueOf(accentColor));
    View rootView = findViewById(R.id.root_view);
    if (theme.darkStatusBarIcons()) {
        Util.setDarkStatusBarIcons(rootView);
    } else {
        Util.setLightStatusBarIcons(rootView);
    }
    final Toolbar toolbar = findViewById(R.id.toolbar);
    toolbar.setActivated(theme.elevatedToolbar());
    if (theme.statusBarOverlay()) {
        addStatusBarOverlay(toolbar);
    }
}
/**
 * @return receiver that reloads the current directory whenever a file
 *         operation broadcasts completion; failures are ignored here
 */
@Override
public BroadcastReceiver getDefaultLocalBroadcastReceiver() {
    return new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            String action = intent.getAction();
            // fix: switching on a null action would throw an NPE
            if (action == null) {
                return;
            }
            switch (action) {
                case FileOperation.RESULT_DONE:
                    // refresh the listing so the operation's result shows up
                    loadDirectory(currentDir.getPath());
                    break;
                case FileOperation.FAILED:
                default:
                    break;
            }
        }
    };
}
/**
 * Extends the base intent filter with the file-operation actions so the
 * default local broadcast receiver gets RESULT_DONE / FAILED broadcasts.
 */
@Override
public IntentFilter getBroadcastIntentFilter() {
return FileOperation.Util.getIntentFilter(super.getBroadcastIntentFilter());
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.stellar.dsl.functions;
import org.apache.metron.stellar.common.utils.StellarProcessorUtils;
import org.apache.metron.stellar.dsl.ParseException;
import org.junit.Assert;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Tests for the Stellar MULTISET_* and SET_* functions. A multiset is
 * represented as a Map from element to occurrence count; a set as a
 * java.util.Set. Null inputs to ADD/REMOVE/MERGE behave like empty
 * collections; initializing from a map is a type error.
 */
public class SetFunctionsTest {
// initializing a multiset from a map literal is invalid -> ParseException
@Test(expected=ParseException.class)
public void multisetInitTest_wrongType() throws Exception {
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_INIT({ 'foo' : 'bar'})", new HashMap<>());
}
@Test
public void multisetInitTest() throws Exception {
// no-arg initialization yields an empty multiset
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_INIT()", new HashMap<>());
Assert.assertEquals(0, s.size());
}
//int initialization
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_INIT([1,2,3,2])", new HashMap<>());
Assert.assertEquals(3, s.size());
Assert.assertTrue(s.containsKey(1));
Assert.assertEquals(1,(int)s.get(1));
Assert.assertTrue(s.containsKey(2));
Assert.assertEquals(2,(int)s.get(2));
Assert.assertTrue(s.containsKey(3));
Assert.assertEquals(1,(int)s.get(3));
}
//string initialization
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_INIT(['one','two','three','two'])", new HashMap<>());
Assert.assertEquals(3, s.size());
Assert.assertTrue(s.containsKey("one"));
Assert.assertEquals(1,(int)s.get("one"));
Assert.assertTrue(s.containsKey("two"));
Assert.assertEquals(2,(int)s.get("two"));
Assert.assertTrue(s.containsKey("three"));
Assert.assertEquals(1,(int)s.get("three"));
}
}
@Test
public void multisetAddTest() throws Exception {
// adding to an empty multiset
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_ADD(MULTISET_INIT(), 1)", new HashMap<>());
Assert.assertEquals(1, s.size());
Assert.assertTrue(s.containsKey(1));
Assert.assertEquals(1,(int)s.get(1));
}
// adding to null is treated as adding to an empty multiset
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_ADD(null, 1)", new HashMap<>());
Assert.assertEquals(1, s.size());
Assert.assertTrue(s.containsKey(1));
Assert.assertEquals(1,(int)s.get(1));
}
//int
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_ADD(MULTISET_INIT([1,2,3,4,4]), 4)", new HashMap<>());
Assert.assertEquals(4, s.size());
Assert.assertTrue(s.containsKey(1));
Assert.assertEquals(1,(int)s.get(1));
Assert.assertTrue(s.containsKey(2));
Assert.assertEquals(1,(int)s.get(2));
Assert.assertTrue(s.containsKey(3));
Assert.assertEquals(1,(int)s.get(3));
Assert.assertTrue(s.containsKey(4));
Assert.assertEquals(3,(int)s.get(4));
}
//string
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_ADD(MULTISET_INIT(['one','two','three', 'four', 'four']), 'four')", new HashMap<>());
Assert.assertEquals(4, s.size());
Assert.assertTrue(s.containsKey("one"));
Assert.assertEquals(1,(int)s.get("one"));
Assert.assertTrue(s.containsKey("two"));
Assert.assertEquals(1,(int)s.get("two"));
Assert.assertTrue(s.containsKey("three"));
Assert.assertEquals(1,(int)s.get("three"));
Assert.assertTrue(s.containsKey("four"));
Assert.assertEquals(3,(int)s.get("four"));
}
}
@Test
public void multisetRemoveTest() throws Exception {
// removing the last occurrence drops the key entirely
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_REMOVE(MULTISET_INIT([1]), 1)", new HashMap<>());
Assert.assertEquals(0, s.size());
}
// removing from null yields an empty multiset
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_REMOVE(null, 1)", new HashMap<>());
Assert.assertEquals(0, s.size());
}
//int
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_REMOVE(MULTISET_INIT([1,2,3,2]), 2)", new HashMap<>());
Assert.assertEquals(3, s.size());
Assert.assertTrue(s.containsKey(1));
Assert.assertEquals(1, (int)s.get(1));
Assert.assertTrue(s.containsKey(2));
Assert.assertEquals(1, (int)s.get(2));
Assert.assertTrue(s.containsKey(3));
Assert.assertEquals(1, (int)s.get(3));
}
//string
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_REMOVE(MULTISET_INIT(['one','two','three', 'two']), 'two')", new HashMap<>());
Assert.assertEquals(3, s.size());
Assert.assertTrue(s.containsKey("one"));
Assert.assertEquals(1, (int)s.get("one"));
Assert.assertTrue(s.containsKey("two"));
Assert.assertEquals(1, (int)s.get("two"));
Assert.assertTrue(s.containsKey("three"));
Assert.assertEquals(1, (int)s.get("three"));
}
}
// merging a map literal is invalid -> ParseException
@Test(expected=ParseException.class)
public void multisetMergeTest_wrongType() throws Exception {
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_MERGE({ 'bar' : 'foo' } )", new HashMap<>());
}
@Test
public void multisetMergeTest() throws Exception {
// empty/null operands merge into an empty multiset
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_MERGE([MULTISET_INIT(), MULTISET_INIT(null), null])", new HashMap<>());
Assert.assertEquals(0, s.size());
}
//int
{
Map<Object, Integer> s = (Map<Object, Integer>) StellarProcessorUtils.run("MULTISET_MERGE([MULTISET_INIT([1,2]), MULTISET_INIT([2,3]), null, MULTISET_INIT()])", new HashMap<>());
Assert.assertEquals(3, s.size());
Assert.assertTrue(s.containsKey(1));
Assert.assertEquals(1, (int)s.get(1));
Assert.assertTrue(s.containsKey(2));
Assert.assertEquals(2, (int)s.get(2));
Assert.assertTrue(s.containsKey(3));
Assert.assertEquals(1, (int)s.get(3));
}
//string
{
Map<Object, Integer> s = (Map<Object, Integer>)StellarProcessorUtils.run("MULTISET_MERGE([MULTISET_INIT(['one','two']), MULTISET_INIT(['two', 'three'])])", new HashMap<>());
Assert.assertEquals(3, s.size());
Assert.assertTrue(s.containsKey("one"));
Assert.assertEquals(1, (int)s.get("one"));
Assert.assertTrue(s.containsKey("two"));
Assert.assertEquals(2, (int)s.get("two"));
Assert.assertTrue(s.containsKey("three"));
Assert.assertEquals(1, (int)s.get("three"));
}
}
// initializing a set from a map literal is invalid -> ParseException
@Test(expected=ParseException.class)
public void setInitTest_wrongType() throws Exception {
Set s = (Set) StellarProcessorUtils.run("SET_INIT({ 'foo' : 2})", new HashMap<>());
}
@Test
public void setInitTest() throws Exception {
// no-arg initialization yields an empty set
{
Set s = (Set) StellarProcessorUtils.run("SET_INIT()", new HashMap<>());
Assert.assertEquals(0, s.size());
}
//int initialization
{
Set s = (Set) StellarProcessorUtils.run("SET_INIT([1,2,3])", new HashMap<>());
Assert.assertEquals(3, s.size());
Assert.assertTrue(s.contains(1));
Assert.assertTrue(s.contains(2));
Assert.assertTrue(s.contains(3));
}
//string initialization
{
Set s = (Set) StellarProcessorUtils.run("SET_INIT(['one','two','three'])", new HashMap<>());
Assert.assertEquals(3, s.size());
Assert.assertTrue(s.contains("one"));
Assert.assertTrue(s.contains("two"));
Assert.assertTrue(s.contains("three"));
}
}
@Test
public void multisetToSetTest() throws Exception {
// conversion drops the counts, keeping distinct elements
{
Set s = (Set) StellarProcessorUtils.run("MULTISET_TO_SET(MULTISET_ADD(MULTISET_INIT(), 1))", new HashMap<>());
Assert.assertEquals(1, s.size());
Assert.assertTrue(s.contains(1));
}
{
Set s = (Set) StellarProcessorUtils.run("MULTISET_TO_SET(MULTISET_ADD(null, 1))", new HashMap<>());
Assert.assertEquals(1, s.size());
Assert.assertTrue(s.contains(1));
}
//int
{
Set s = (Set) StellarProcessorUtils.run("MULTISET_TO_SET(MULTISET_ADD(MULTISET_INIT([1,2,3]), 4))", new HashMap<>());
Assert.assertEquals(4, s.size());
Assert.assertTrue(s.contains(1));
Assert.assertTrue(s.contains(2));
Assert.assertTrue(s.contains(3));
Assert.assertTrue(s.contains(4));
}
//string
{
Set s = (Set) StellarProcessorUtils.run("MULTISET_TO_SET(MULTISET_ADD(MULTISET_INIT(['one','two','three']), 'four'))", new HashMap<>());
Assert.assertEquals(4, s.size());
Assert.assertTrue(s.contains("one"));
Assert.assertTrue(s.contains("two"));
Assert.assertTrue(s.contains("three"));
Assert.assertTrue(s.contains("four"));
}
}
@Test
public void setAddTest() throws Exception {
// adding to an empty set
{
Set s = (Set) StellarProcessorUtils.run("SET_ADD(SET_INIT(), 1)", new HashMap<>());
Assert.assertEquals(1, s.size());
Assert.assertTrue(s.contains(1));
}
// adding to null is treated as adding to an empty set
{
Set s = (Set) StellarProcessorUtils.run("SET_ADD(null, 1)", new HashMap<>());
Assert.assertEquals(1, s.size());
Assert.assertTrue(s.contains(1));
}
//int
{
Set s = (Set) StellarProcessorUtils.run("SET_ADD(SET_INIT([1,2,3]), 4)", new HashMap<>());
Assert.assertEquals(4, s.size());
Assert.assertTrue(s.contains(1));
Assert.assertTrue(s.contains(2));
Assert.assertTrue(s.contains(3));
Assert.assertTrue(s.contains(4));
}
//string
{
Set s = (Set) StellarProcessorUtils.run("SET_ADD(SET_INIT(['one','two','three']), 'four')", new HashMap<>());
Assert.assertEquals(4, s.size());
Assert.assertTrue(s.contains("one"));
Assert.assertTrue(s.contains("two"));
Assert.assertTrue(s.contains("three"));
Assert.assertTrue(s.contains("four"));
}
}
@Test
public void setRemoveTest() throws Exception {
{
Set s = (Set) StellarProcessorUtils.run("SET_REMOVE(SET_INIT([1]), 1)", new HashMap<>());
Assert.assertEquals(0, s.size());
}
// removing from null yields an empty set
{
Set s = (Set) StellarProcessorUtils.run("SET_REMOVE(null, 1)", new HashMap<>());
Assert.assertEquals(0, s.size());
}
//int
{
Set s = (Set) StellarProcessorUtils.run("SET_REMOVE(SET_INIT([1,2,3]), 2)", new HashMap<>());
Assert.assertEquals(2, s.size());
Assert.assertTrue(s.contains(1));
Assert.assertTrue(s.contains(3));
}
//string
{
Set s = (Set) StellarProcessorUtils.run("SET_REMOVE(SET_INIT(['one','two','three']), 'three')", new HashMap<>());
Assert.assertEquals(2, s.size());
Assert.assertTrue(s.contains("one"));
Assert.assertTrue(s.contains("two"));
}
}
// merging a map literal is invalid -> ParseException
@Test(expected=ParseException.class)
public void setMergeTest_wrongType() throws Exception {
Set s = (Set) StellarProcessorUtils.run("SET_MERGE({ 'foo' : 'bar'} )", new HashMap<>());
}
@Test
public void setMergeTest() throws Exception {
// empty/null operands merge into an empty set
{
Set s = (Set) StellarProcessorUtils.run("SET_MERGE([SET_INIT(), SET_INIT(null), null])", new HashMap<>());
Assert.assertEquals(0, s.size());
}
//int
{
Set s = (Set) StellarProcessorUtils.run("SET_MERGE([SET_INIT([1,2]), SET_INIT([3]), null, SET_INIT()])", new HashMap<>());
Assert.assertEquals(3, s.size());
Assert.assertTrue(s.contains(1));
Assert.assertTrue(s.contains(2));
Assert.assertTrue(s.contains(3));
}
//string
{
Set s = (Set) StellarProcessorUtils.run("SET_MERGE([SET_INIT(['one','two']), SET_INIT(['three'])])", new HashMap<>());
Assert.assertEquals(3, s.size());
Assert.assertTrue(s.contains("one"));
Assert.assertTrue(s.contains("two"));
Assert.assertTrue(s.contains("three"));
}
}
}
| |
package br.org.pythonbrasil.pyeventos.model;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
import java.util.Date;
import java.util.List;
import br.org.pythonbrasil.pyeventos.api.PythonBrasilUrls;
import br.org.pythonbrasil.pyeventos.db.DatabaseManager;
public class Event implements Parcelable {
// database id of this event
private long id;
// conference day this event belongs to
private Day day;
private Date startTime;
private Date endTime;
// may be null; getRoomName() maps null to ""
private String roomName;
// URL slug identifying the event on the website
private String slug;
private String title;
private String subTitle;
private Track track;
private String abstractText;
private String description;
// pre-built speaker summary; getPersonsSummary() falls back to 'persons'
private String personsSummary;
private List<Person> persons; // Optional
private List<Link> links; // Optional
// no-arg constructor required for manual population / parceling
public Event() {
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public Day getDay() {
return day;
}
public void setDay(Day day) {
this.day = day;
}
public Date getStartTime() {
return startTime;
}
public void setStartTime(Date startTime) {
this.startTime = startTime;
}
public Date getEndTime() {
return endTime;
}
public void setEndTime(Date endTime) {
this.endTime = endTime;
}
/**
 * @param time instant in epoch milliseconds
 * @return true when the event is strictly in progress at the given time
 *         (bounds exclusive); false when either bound is unset
 */
public boolean isRunningAtTime(long time) {
return (startTime != null) && (endTime != null) && (startTime.getTime() < time) && (time < endTime.getTime());
}
/**
* @return The event duration in minutes
*/
public int getDuration() {
if ((startTime == null) || (endTime == null)) {
return 0;
}
return (int) ((this.endTime.getTime() - this.startTime.getTime()) / 1000L);
}
public String getRoomName() {
return (roomName == null) ? "" : roomName;
}
public void setRoomName(String roomName) {
this.roomName = roomName;
}
public String getSlug() {
return slug;
}
public void setSlug(String slug) {
this.slug = slug;
}
public String getUrl() {
return PythonBrasilUrls.getEvent(slug, DatabaseManager.getInstance().getYear());
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getSubTitle() {
return subTitle;
}
public void setSubTitle(String subTitle) {
this.subTitle = subTitle;
}
public Track getTrack() {
return track;
}
public void setTrack(Track track) {
this.track = track;
}
public String getAbstractText() {
return abstractText;
}
public void setAbstractText(String abstractText) {
this.abstractText = abstractText;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getPersonsSummary() {
if (personsSummary != null) {
return personsSummary;
}
if (persons != null) {
return TextUtils.join(", ", persons);
}
return "";
}
public void setPersonsSummary(String personsSummary) {
this.personsSummary = personsSummary;
}
public List<Person> getPersons() {
return persons;
}
public void setPersons(List<Person> persons) {
this.persons = persons;
}
public List<Link> getLinks() {
return links;
}
public void setLinks(List<Link> links) {
this.links = links;
}
@Override
public String toString() {
return title;
}
@Override
public int hashCode() {
return (int) (id ^ (id >>> 32));
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
Event other = (Event) obj;
return id == other.id;
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel out, int flags) {
out.writeLong(id);
day.writeToParcel(out, flags);
out.writeLong((startTime == null) ? 0L : startTime.getTime());
out.writeLong((endTime == null) ? 0L : endTime.getTime());
out.writeString(roomName);
out.writeString(slug);
out.writeString(title);
out.writeString(subTitle);
track.writeToParcel(out, flags);
out.writeString(abstractText);
out.writeString(description);
out.writeString(personsSummary);
out.writeTypedList(persons);
out.writeTypedList(links);
}
public static final Parcelable.Creator<Event> CREATOR = new Parcelable.Creator<Event>() {
public Event createFromParcel(Parcel in) {
return new Event(in);
}
public Event[] newArray(int size) {
return new Event[size];
}
};
private Event(Parcel in) {
id = in.readLong();
day = Day.CREATOR.createFromParcel(in);
long time = in.readLong();
if (time != 0L) {
startTime = new Date(time);
}
time = in.readLong();
if (time != 0L) {
endTime = new Date(time);
}
roomName = in.readString();
slug = in.readString();
title = in.readString();
subTitle = in.readString();
track = Track.CREATOR.createFromParcel(in);
abstractText = in.readString();
description = in.readString();
personsSummary = in.readString();
persons = in.createTypedArrayList(Person.CREATOR);
links = in.createTypedArrayList(Link.CREATOR);
}
}
| |
package com.aventura.math.vector;
import static org.junit.Assert.*;
import org.junit.Test;
import com.aventura.math.Constants;
public class TestMatrix3 {

    /** Default constructor and int constructor with 0 must both produce the zero matrix. */
    @Test
    public void testMatrix3_0() {
        System.out.println("***** Test Matrix3 : testMatrix3_0 *****");
        Matrix3 A = new Matrix3();
        Matrix3 B = new Matrix3(0);
        Matrix3 C = new Matrix3(7);
        System.out.println("A="+A);
        System.out.println("B="+B);
        // NOTE(review): C is only printed, never asserted against anything.
        System.out.println("C="+C);
        if (!A.equals(B)) fail("A does not equals B");
    }

    /** An all-zero float[3][3] must construct a matrix equal to the default (zero) matrix. */
    @Test
    public void testMatrix3_array_0() {
        System.out.println("***** Test Matrix3 : testMatrix3_array() *****");
        float[][] array = new float[3][3];
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                array[i][j] = 0;
            }
        }
        Matrix3 A;
        Matrix3 B;
        A = new Matrix3(array);
        B = new Matrix3();
        System.out.println("A="+A);
        System.out.println("B="+B);
        if (!A.equals(B)) fail("A does not equals B");
    }

    /** Array constructor vs. constant constructor plus a single set(row, col, value). */
    @Test
    public void testMatrix3_array_value() {
        System.out.println("***** Test Matrix3 : testMatrix3_array_value() *****");
        float[][] array = new float[3][3];
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                array[i][j] = 5;
            }
        }
        array[1][2]=22.3f;
        Matrix3 A;
        Matrix3 B;
        A = new Matrix3(array);
        B = new Matrix3(5);
        B.set(1,2,22.3f);
        System.out.println("A="+A);
        System.out.println("B="+B);
        if (!A.equals(B)) fail("A does not equals B");
    }

    /** plus() must add element-wise without modifying the operands. */
    @Test
    public void testMatrix3_plus() {
        System.out.println("***** Test Matrix3 : testMatrix3_plus() *****");
        float[][] array = new float[3][3];
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                array[i][j] = i+j;
            }
        }
        Matrix3 A;
        Matrix3 B;
        Matrix3 C;
        A = new Matrix3(array);
        B = new Matrix3(5);
        C = A.plus(B);
        System.out.println("A="+A);
        System.out.println("B="+B);
        System.out.println("C="+C);
        // Expected element-wise: C[i][j] = (i+j) + 5
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                if (C.get(i,j) != i+j+5) fail("C does not equals A+B");
            }
        }
    }

    /** minus() must subtract element-wise without modifying the operands. */
    @Test
    public void testMatrix3_minus() {
        System.out.println("***** Test Matrix3 : testMatrix3_minus() *****");
        float[][] array = new float[3][3];
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                array[i][j] = i+j;
            }
        }
        Matrix3 A;
        Matrix3 B;
        Matrix3 C;
        A = new Matrix3(array);
        B = new Matrix3(2);
        C = A.minus(B);
        System.out.println("A="+A);
        System.out.println("B="+B);
        System.out.println("C="+C);
        // Expected element-wise: C[i][j] = (i+j) - 2
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                if (C.get(i,j) != i+j-2) fail("C does not equals A-B");
            }
        }
    }

    /** plusEquals() must add B into A in place. */
    @Test
    public void testMatrix3_plusEquals() {
        System.out.println("***** Test Matrix3 : testMatrix3_plusEquals() *****");
        float[][] array1 = new float[3][3];
        float[][] array2 = new float[3][3];
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                array1[i][j] = i+j;
                array2[i][j] = i-j +7;
            }
        }
        /*
         * A=[[0.0, 1.0, 2.0]
         *    [1.0, 2.0, 3.0]
         *    [2.0, 3.0, 4.0]]
         *
         * B=[[7.0, 6.0, 5.0]
         *    [8.0, 7.0, 6.0]
         *    [9.0, 8.0, 7.0]]
         */
        Matrix3 A;
        Matrix3 B;
        A = new Matrix3(array1);
        B = new Matrix3(array2);
        System.out.println("A="+A);
        System.out.println("B="+B);
        A.plusEquals(B);
        System.out.println("A="+A);
        System.out.println("B="+B);
        // A+B has constant rows: row i is filled with 2*i + 7.
        if (!(A.get(0,0) == 7.0 && A.get(0,1) == 7.0 && A.get(0,2) == 7.0)) fail("A does not equals A+B");
        if (!(A.get(1,0) == 9.0 && A.get(1,1) == 9.0 && A.get(1,2) == 9.0)) fail("A does not equals A+B");
        if (!(A.get(2,0) == 11.0 && A.get(2,1) == 11.0 && A.get(2,2) == 11.0)) fail("A does not equals A+B");
    }

    /** minusEquals() must subtract B from A in place. */
    @Test
    public void testMatrix3_minusEquals() {
        System.out.println("***** Test Matrix3 : testMatrix3_minusEquals() *****");
        float[][] array1 = new float[3][3];
        float[][] array2 = new float[3][3];
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                array1[i][j] = i+j;
                array2[i][j] = i-j +7;
            }
        }
        /*
         * A=[[0.0, 1.0, 2.0]
         *    [1.0, 2.0, 3.0]
         *    [2.0, 3.0, 4.0]]
         *
         * B=[[7.0, 6.0, 5.0]
         *    [8.0, 7.0, 6.0]
         *    [9.0, 8.0, 7.0]]
         */
        Matrix3 A;
        Matrix3 B;
        A = new Matrix3(array1);
        B = new Matrix3(array2);
        System.out.println("A="+A);
        System.out.println("B="+B);
        A.minusEquals(B);
        System.out.println("A="+A);
        System.out.println("B="+B);
        // A-B: each row is [-7, -5, -3] since (i+j) - (i-j+7) = 2*j - 7.
        if (!(A.get(0,0) == -7.0 && A.get(0,1) == -5.0 && A.get(0,2) == -3.0)) fail("A does not equals A-B");
        if (!(A.get(1,0) == -7.0 && A.get(1,1) == -5.0 && A.get(1,2) == -3.0)) fail("A does not equals A-B");
        if (!(A.get(2,0) == -7.0 && A.get(2,1) == -5.0 && A.get(2,2) == -3.0)) fail("A does not equals A-B");
    }

    /** times() must perform standard matrix multiplication (expected values precomputed by hand). */
    @Test
    public void testMatrix3_times() {
        System.out.println("***** Test Matrix3 : testMatrix3_times() *****");
        float[][] array1 = new float[3][3];
        float[][] array2 = new float[3][3];
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                array1[i][j] = i+j;
                array2[i][j] = i-j +7;
            }
        }
        /*
         * A=[[0.0, 1.0, 2.0]
         *    [1.0, 2.0, 3.0]
         *    [2.0, 3.0, 4.0]]
         *
         * B=[[7.0, 6.0, 5.0]
         *    [8.0, 7.0, 6.0]
         *    [9.0, 8.0, 7.0]]
         */
        Matrix3 A;
        Matrix3 B;
        A = new Matrix3(array1);
        B = new Matrix3(array2);
        System.out.println("A="+A);
        System.out.println("B="+B);
        Matrix3 C = A.times(B);
        System.out.println("C="+C);
        // Hand-computed A*B, e.g. C[0][0] = 0*7 + 1*8 + 2*9 = 26.
        if (!(C.get(0,0) == 26.0 && C.get(0,1) == 23.0 && C.get(0,2) == 20.0)) fail("C does not equals A^B");
        if (!(C.get(1,0) == 50.0 && C.get(1,1) == 44.0 && C.get(1,2) == 38.0)) fail("C does not equals A^B");
        if (!(C.get(2,0) == 74.0 && C.get(2,1) == 65.0 && C.get(2,2) == 56.0)) fail("C does not equals A^B");
    }

    /** transpose() is an involution: transpose(transpose(A)) == A. */
    @Test
    public void testMatrix3_transpose1() {
        System.out.println("***** Test Matrix3 : testMatrix3_transpose1() *****");
        float[][] array = new float[3][3];
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                array[i][j] = i-j+2;
            }
        }
        /*
         * A=[[2.0, 1.0, 0.0]
         *    [3.0, 2.0, 1.0]
         *    [4.0, 3.0, 2.0]]
         */
        Matrix3 A;
        A = new Matrix3(array);
        System.out.println("A="+A);
        Matrix3 B = A.transpose();
        System.out.println("B="+B);
        Matrix3 C = B.transpose();
        if (!A.equals(C)) fail("C =transpose(transpose(A)) does not equals A");
    }

    /** transposeEquals() (in place) must agree with transpose() (pure). */
    @Test
    public void testMatrix3_transpose2() {
        System.out.println("***** Test Matrix3 : testMatrix3_transpose2() *****");
        float[][] array = new float[3][3];
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                array[i][j] = i-j+2;
            }
        }
        /*
         * A=[[2.0, 1.0, 0.0]
         *    [3.0, 2.0, 1.0]
         *    [4.0, 3.0, 2.0]]
         */
        Matrix3 A;
        A = new Matrix3(array);
        System.out.println("A="+A);
        Matrix3 B = new Matrix3(A); // Keep image of A before transposition
        A.transposeEquals();
        System.out.println("A transposed ="+A);
        Matrix3 C = A.transpose(); // Do not modify A for this transposition
        if (!B.equals(C)) fail("transpose(transpose(A)) does not equals A");
    }

    /** inverse() is an involution (within epsilon) for an upper-triangular invertible matrix. */
    @Test
    public void testMatrix3_inverse1() {
        System.out.println("***** Test Matrix3 : testMatrix3_inverse1() *****");
        float[][] array = new float[3][3];
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                if (i>j) {
                    array[i][j] = 0;
                } else {
                    array[i][j] = 10-2*i-j;
                }
            }
        }
        /*
         * Upper-triangular with non-zero diagonal, hence invertible:
         * A=[[10.0, 9.0, 8.0]
         *    [0.0, 7.0, 6.0]
         *    [0.0, 0.0, 4.0]]
         */
        Matrix3 A, B, C;
        A = new Matrix3(array);
        B= null;
        System.out.println("A="+A);
        try {
            B = A.inverse(); // Calculate inverse
            System.out.println("B="+B);
        } catch (NotInvertibleMatrixException e) {
            fail("Not invertible Matrix A");
        }
        try {
            C = B.inverse(); // Inverse the inverse
            System.out.println("C ="+C);
            // Float round-off accumulates through two inversions, so compare with epsilon.
            if (!A.equalsEpsilon(C, Constants.EPSILON)) fail("inverse(inverse(A)) does not equals A");
        } catch (NotInvertibleMatrixException e) {
            fail("Not invertible Matrix B");
        }
    }

    /** inverse(A) * A must equal the identity (within epsilon). */
    @Test
    public void testMatrix3_inverse2() {
        System.out.println("***** Test Matrix3 : testMatrix3_inverse2() *****");
        float[][] array = new float[3][3];
        for (int i=0; i<3; i++) {
            for (int j=0; j<3; j++) {
                if (i>j) {
                    array[i][j] = 0;
                } else {
                    array[i][j] = 10-2*i-j;
                }
            }
        }
        /*
         * Same upper-triangular invertible matrix as testMatrix3_inverse1:
         * A=[[10.0, 9.0, 8.0]
         *    [0.0, 7.0, 6.0]
         *    [0.0, 0.0, 4.0]]
         */
        Matrix3 A;
        A = new Matrix3(array);
        System.out.println("A="+A);
        try {
            Matrix3 B = A.inverse(); // Calculate inverse
            System.out.println("B="+B);
            Matrix3 C = B.times(A); // inverse(A).A = I
            System.out.println("C ="+C);
            if (!C.equalsEpsilon(Matrix3.IDENTITY, Constants.EPSILON)) fail("A.inverse(A) does not equals I");
        } catch (NotInvertibleMatrixException e) {
            fail("Not invertible Matrix");
        }
    }

    /** The identity matrix is its own inverse, exactly. */
    @Test
    public void testMatrix3_inverse3() {
        System.out.println("***** Test Matrix3 : testMatrix3_inverse3() *****");
        /*
         * A=[[1.0, 0.0, 0.0]
         *    [0.0, 1.0, 0.0]
         *    [0.0, 0.0, 1.0]]
         */
        Matrix3 A;
        A = new Matrix3(Matrix3.IDENTITY);
        System.out.println("A="+A);
        try {
            Matrix3 B = A.inverse(); // Calculate inverse
            System.out.println("B="+B);
            if (!B.equals(Matrix3.IDENTITY)) fail("inverse of Identity does not equals I");
        } catch (NotInvertibleMatrixException e) {
            fail("Not invertible Matrix");
        }
    }
}
| |
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.collide.client.editor;
import com.google.collide.client.util.logging.Log;
import com.google.collide.json.shared.JsonArray;
import com.google.collide.shared.document.Document;
import com.google.collide.shared.document.Line;
import com.google.collide.shared.document.LineInfo;
import com.google.collide.shared.document.anchor.Anchor;
import com.google.collide.shared.document.anchor.Anchor.RemovalStrategy;
import com.google.collide.shared.document.anchor.AnchorManager;
import com.google.collide.shared.document.anchor.AnchorType;
import com.google.collide.shared.util.ListenerRegistrar.Remover;
import com.google.collide.shared.util.SortedList;
import com.google.collide.shared.util.SortedList.OneWayIntComparator;
/**
* This class takes care of mapping between the different coordinates used by
* the editor. The two supported systems are:
* <ul>
* <li>Offset (x,y) - in pixels, relative to the top left of line 0 in the
* current document.
* <li>Line (line, column) - the real line number and column, taking into
* account spacer objects in between lines. Lines and columns are 0-indexed.
* </ul>
*/
class CoordinateMap implements Document.LineListener {

    /** Supplies editor font metrics and receives spacer-height-change notifications. */
    interface DocumentSizeProvider {
        float getEditorCharacterWidth();
        int getEditorLineHeight();
        void handleSpacerHeightChanged(Spacer spacer, int oldHeight);
    }

    /**
     * Immutable cache entry tied to a spacer: the y-offset of the spacer's top,
     * the spacer's height, and the line number the spacer sits on.
     */
    private static class OffsetCache {
        private static final SortedList.Comparator<OffsetCache> COMPARATOR =
                new SortedList.Comparator<OffsetCache>() {
                    @Override
                    public int compare(OffsetCache a, OffsetCache b) {
                        // FIX: Integer.compare instead of subtraction, which can
                        // overflow for extreme values and return the wrong sign.
                        return Integer.compare(a.offset, b.offset);
                    }
                };

        private static final SortedList.OneWayIntComparator<OffsetCache> Y_OFFSET_ONE_WAY_COMPARATOR =
                new SortedList.OneWayIntComparator<OffsetCache>() {
                    @Override
                    public int compareTo(OffsetCache s) {
                        // FIX: overflow-safe comparison (was `value - s.offset`).
                        return Integer.compare(value, s.offset);
                    }
                };

        private static final SortedList.OneWayIntComparator<OffsetCache> LINE_NUMBER_ONE_WAY_COMPARATOR
                = new SortedList.OneWayIntComparator<OffsetCache>() {
                    @Override
                    public int compareTo(OffsetCache s) {
                        // FIX: overflow-safe comparison (was `value - s.lineNumber`).
                        return Integer.compare(value, s.lineNumber);
                    }
                };

        private final int offset;
        private final int height;
        private final int lineNumber;

        private OffsetCache(int offset, int lineNumber, int height) {
            this.offset = offset;
            this.height = height;
            this.lineNumber = lineNumber;
        }
    }

    /** Sentinel entry used when no spacer precedes the lookup target. */
    private static final OffsetCache BEGINNING_EMPTY_OFFSET_CACHE = new OffsetCache(0, 0, 0);

    private static final AnchorType SPACER_ANCHOR_TYPE = AnchorType.create(CoordinateMap.class,
            "spacerAnchorType");

    private static final Spacer.Comparator SPACER_COMPARATOR = new Spacer.Comparator();
    private static final Spacer.OneWaySpacerComparator SPACER_ONE_WAY_COMPARATOR =
            new Spacer.OneWaySpacerComparator();

    /** Used by {@link #getPrecedingOffsetCache(int, int)} to mark the unused lookup key. */
    private static final int IGNORE = Integer.MIN_VALUE;

    private Document document;
    private DocumentSizeProvider documentSizeProvider;

    /** List of offset cache items, sorted by the offset */
    private SortedList<OffsetCache> offsetCache;

    /**
     * True if there is at least one spacer in the editor, false otherwise (false
     * means a simple height / line height calculation can be used)
     */
    private boolean requiresMapping;

    /** Sorted by line number */
    private SortedList<Spacer> spacers;

    /** Summation of all spacers' heights */
    private int totalSpacerHeight;

    /** Remover for the document line listener registered in handleDocumentChange(). */
    private Remover documentLineListenerRemover;

    CoordinateMap(DocumentSizeProvider documentSizeProvider) {
        this.documentSizeProvider = documentSizeProvider;
        requiresMapping = false;
    }

    /**
     * Converts a y pixel offset (relative to the top of line 0) to a line number.
     * Negative y clamps to line 0; a y inside a spacer maps to the spacer's line.
     */
    int convertYToLineNumber(int y) {
        if (y < 0) {
            return 0;
        }

        int lineHeight = documentSizeProvider.getEditorLineHeight();
        if (!requiresMapping) {
            // No spacers: plain uniform-height arithmetic.
            return y / lineHeight;
        }

        OffsetCache precedingOffsetCache = getPrecedingOffsetCache(y, IGNORE);
        int precedingOffsetCacheBottom = precedingOffsetCache.offset + precedingOffsetCache.height;
        int lineNumberRelativeToOffsetCacheLine = (y - precedingOffsetCacheBottom) / lineHeight;
        if (y < precedingOffsetCacheBottom) {
            // y is inside the spacer
            return precedingOffsetCache.lineNumber;
        } else {
            return precedingOffsetCache.lineNumber + lineNumberRelativeToOffsetCacheLine;
        }
    }

    /**
     * Returns the top of the given line.
     */
    int convertLineNumberToY(int lineNumber) {
        int lineHeight = documentSizeProvider.getEditorLineHeight();
        if (!requiresMapping) {
            // No spacers: plain uniform-height arithmetic.
            return lineNumber * lineHeight;
        }

        OffsetCache precedingOffsetCache = getPrecedingOffsetCache(IGNORE, lineNumber);
        int precedingOffsetCacheBottom = precedingOffsetCache.offset + precedingOffsetCache.height;
        int offsetRelativeToOffsetCacheBottom =
                (lineNumber - precedingOffsetCache.lineNumber) * lineHeight;
        return precedingOffsetCacheBottom + offsetRelativeToOffsetCacheBottom;
    }

    /**
     * Returns the first {@link OffsetCache} that is positioned less than or equal
     * to {@code y} or {@code lineNumber}. This methods fills the
     * {@link #offsetCache} if necessary ensuring the returned {@link OffsetCache}
     * is up-to-date.
     *
     * @param y the y, or {@link #IGNORE} if looking up by {@code lineNumber}
     * @param lineNumber the line number, or {@link #IGNORE} if looking up by
     *        {@code y}
     */
    private OffsetCache getPrecedingOffsetCache(int y, int lineNumber) {
        // Exactly one of the two lookup keys must be provided.
        assert (y != IGNORE && lineNumber == IGNORE) || (lineNumber != IGNORE && y == IGNORE);

        final int lineHeight = documentSizeProvider.getEditorLineHeight();

        OffsetCache previousOffsetCache;
        if (y != IGNORE) {
            previousOffsetCache =
                    getCachedPrecedingOffsetCacheImpl(OffsetCache.Y_OFFSET_ONE_WAY_COMPARATOR, y);
        } else {
            previousOffsetCache =
                    getCachedPrecedingOffsetCacheImpl(OffsetCache.LINE_NUMBER_ONE_WAY_COMPARATOR, lineNumber);
        }

        if (previousOffsetCache == null) {
            // Nothing cached before the target: seed with line 0, which itself may
            // carry a spacer.
            if (spacers.size() > 0 && spacers.get(0).getLineNumber() == 0) {
                previousOffsetCache = createOffsetCache(0, 0, spacers.get(0).getHeight());
            } else {
                previousOffsetCache = BEGINNING_EMPTY_OFFSET_CACHE;
            }
        }

        /*
         * Optimization so the common case that the target has previously been
         * computed requires no more computation
         */
        int offsetCacheSize = offsetCache.size();
        if (offsetCacheSize > 0
                && isTargetEarlierThanOffsetCache(y, lineNumber, offsetCache.get(offsetCacheSize - 1))) {
            return previousOffsetCache;
        }

        // This will return this offset cache's matching spacer
        int spacerPos = getPrecedingSpacerIndex(previousOffsetCache.lineNumber);
        /*
         * We want the spacer following this offset cache's spacer, or the first
         * spacer if none were found
         */
        spacerPos++;

        // Walk forward spacer by spacer, materializing cache entries, until the
        // next entry would pass the target.
        for (int n = spacers.size(); spacerPos < n; spacerPos++) {
            Spacer curSpacer = spacers.get(spacerPos);
            int previousOffsetCacheBottom = previousOffsetCache.offset + previousOffsetCache.height;
            int simpleLinesHeight =
                    (curSpacer.getLineNumber() - previousOffsetCache.lineNumber) * lineHeight;
            if (simpleLinesHeight == 0) {
                Log.warn(Spacer.class, "More than one spacer on line " + previousOffsetCache.lineNumber);
            }

            // Create an offset cache for this spacer
            OffsetCache curOffsetCache =
                    createOffsetCache(previousOffsetCacheBottom + simpleLinesHeight,
                            curSpacer.getLineNumber(), curSpacer.getHeight());
            if (isTargetEarlierThanOffsetCache(y, lineNumber, curOffsetCache)) {
                return previousOffsetCache;
            }

            previousOffsetCache = curOffsetCache;
        }

        return previousOffsetCache;
    }

    /**
     * Returns the {@link OffsetCache} instance in list that has the greatest
     * value less than or equal to the given {@code value}. Returns null if there
     * isn't one.
     *
     * This should only be used by {@link #getPrecedingOffsetCache(int, int)}.
     */
    private OffsetCache getCachedPrecedingOffsetCacheImpl(
            OneWayIntComparator<OffsetCache> comparator, int value) {
        comparator.setValue(value);
        int index = offsetCache.findInsertionIndex(comparator, false);
        return index >= 0 ? offsetCache.get(index) : null;
    }

    /** True when the lookup target (y or line number) lies strictly before the given cache entry. */
    private boolean isTargetEarlierThanOffsetCache(int y, int lineNumber, OffsetCache offsetCache) {
        return ((y != IGNORE && y < offsetCache.offset) ||
                (lineNumber != IGNORE && lineNumber < offsetCache.lineNumber));
    }

    private OffsetCache createOffsetCache(int offset, int lineNumber, int height) {
        OffsetCache createdOffsetCache = new OffsetCache(offset, lineNumber, height);
        offsetCache.add(createdOffsetCache);
        return createdOffsetCache;
    }

    /** Index of the last spacer on or before {@code lineNumber}, or -1 if none. */
    private int getPrecedingSpacerIndex(int lineNumber) {
        SPACER_ONE_WAY_COMPARATOR.setValue(lineNumber);
        return spacers.findInsertionIndex(SPACER_ONE_WAY_COMPARATOR, false);
    }

    /**
     * Adds a spacer above the given lineInfo line with height heightPx and
     * returns the created Spacer object.
     *
     * @param lineInfo the line before which the spacer will be inserted
     * @param height the height in pixels of the spacer
     */
    Spacer createSpacer(LineInfo lineInfo, int height, Buffer buffer, String cssClass) {
        int lineNumber = lineInfo.number();

        // create an anchor on the current line
        Anchor anchor =
                document.getAnchorManager().createAnchor(SPACER_ANCHOR_TYPE, lineInfo.line(), lineNumber,
                        AnchorManager.IGNORE_COLUMN);
        anchor.setRemovalStrategy(RemovalStrategy.SHIFT);

        // account for the height of the line the spacer is on
        Spacer spacer = new Spacer(anchor, height, this, buffer, cssClass);
        spacers.add(spacer);
        totalSpacerHeight += height;

        invalidateLineNumberAndFollowing(lineNumber);
        requiresMapping = true;
        return spacer;
    }

    /**
     * Removes the given spacer. Returns false when the spacer was not registered.
     */
    boolean removeSpacer(Spacer spacer) {
        int lineNumber = spacer.getLineNumber();
        if (spacers.remove(spacer)) {
            document.getAnchorManager().removeAnchor(spacer.getAnchor());
            totalSpacerHeight -= spacer.getHeight();
            // NOTE(review): invalidates from lineNumber - 1, one line earlier than
            // createSpacer does — presumably intentional; confirm.
            invalidateLineNumberAndFollowing(lineNumber - 1);
            updateRequiresMapping();
            return true;
        }
        return false;
    }

    /**
     * Rebinds this map to a new document, resetting all spacer and cache state and
     * moving the line listener registration.
     */
    void handleDocumentChange(Document document) {
        if (documentLineListenerRemover != null) {
            documentLineListenerRemover.remove();
        }

        this.document = document;
        spacers = new SortedList<Spacer>(SPACER_COMPARATOR);
        offsetCache =
                new SortedList<OffsetCache>(OffsetCache.COMPARATOR);
        documentLineListenerRemover = document.getLineListenerRegistrar().add(this);

        requiresMapping = false; // starts with no items in list
        totalSpacerHeight = 0;
    }

    @Override
    public void onLineAdded(Document document, int lineNumber, JsonArray<Line> addedLines) {
        invalidateLineNumberAndFollowing(lineNumber);
    }

    @Override
    public void onLineRemoved(Document document, int lineNumber, JsonArray<Line> removedLines) {
        invalidateLineNumberAndFollowing(lineNumber);
    }

    /**
     * Call this after any line changes (adding/deleting lines, changing line
     * heights). Only invalidate (delete) cache items >= lineNumber, don't
     * recalculate.
     */
    void invalidateLineNumberAndFollowing(int lineNumber) {
        OffsetCache.LINE_NUMBER_ONE_WAY_COMPARATOR.setValue(lineNumber);
        int insertionIndex = offsetCache.findInsertionIndex(OffsetCache.LINE_NUMBER_ONE_WAY_COMPARATOR);
        offsetCache.removeThisAndFollowing(insertionIndex);
    }

    private void updateRequiresMapping() {
        // check to change active status
        requiresMapping = spacers.size() > 0;
    }

    int getTotalSpacerHeight() {
        return totalSpacerHeight;
    }

    /** Keeps the running total in sync and invalidates cache entries at and after the spacer's line. */
    void handleSpacerHeightChanged(Spacer spacer, int oldHeight) {
        totalSpacerHeight -= oldHeight;
        totalSpacerHeight += spacer.getHeight();
        invalidateLineNumberAndFollowing(spacer.getLineNumber());
        documentSizeProvider.handleSpacerHeightChanged(spacer, oldHeight);
    }
}
| |
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package apksigner;
import java.io.ByteArrayOutputStream;
import java.io.Console;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CodingErrorAction;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Retriever of passwords based on password specs supported by {@code apksigner} tool.
*
* <p>apksigner supports retrieving multiple passwords from the same source (e.g., file, standard
* input) which adds the need to keep some sources open across password retrievals. This class
* addresses the need.
*
* <p>To use this retriever, construct a new instance, use {@link #getPasswords(String, String)} to
* retrieve passwords, and then invoke {@link #close()} on the instance when done, enabling the
* instance to release any held resources.
*/
class PasswordRetriever implements AutoCloseable {
    /** Password spec denoting "read from console, or standard input if no console". */
    public static final String SPEC_STDIN = "stdin";

    /** Best-effort console character encoding; see getConsoleEncoding(). */
    private static final Charset CONSOLE_CHARSET = getConsoleEncoding();

    // Streams opened for file-based specs, kept open so the same file can supply
    // several passwords one line at a time (see class javadoc).
    private final Map<File, InputStream> mFileInputStreams = new HashMap<>();

    // Presumably set by close() to guard against use-after-close — the relevant
    // methods are outside this excerpt; confirm.
    private boolean mClosed;
/**
 * Expands {@code pwd} into the password itself plus every derived variant (see
 * {@link #addPasswords}). The returned list always has at least one element.
 */
private static List<char[]> getPasswords(char[] pwd) {
    List<char[]> result = new ArrayList<>(3);
    addPasswords(result, pwd);
    return result;
}
/**
 * Returns the provided password and all password variants derived from the
 * password. The resulting list is guaranteed to contain at least one element.
 *
 * @param encodedPwd password encoded using the provided character encoding.
 * @param encodings character encodings in which the password is encoded in {@code encodedPwd}.
 */
private static List<char[]> getPasswords(byte[] encodedPwd, Charset... encodings) {
    List<char[]> result = new ArrayList<>(4);
    for (Charset encoding : encodings) {
        // Decode under each candidate encoding and collect that form's variants;
        // a failed decode simply contributes nothing.
        try {
            addPasswords(result, decodePassword(encodedPwd, encoding));
        } catch (IOException ignored) {
        }
    }
    // Finally, the raw encoded bytes upcast to chars.
    addPassword(result, castBytesToChars(encodedPwd));
    return result;
}
/**
 * Adds the provided password and its variants to the provided list of passwords.
 *
 * <p>NOTE: Only the passwords/variants not yet present in the list are added.
 */
private static void addPasswords(List<char[]> passwords, char[] pwd) {
    // 1. The password exactly as provided.
    addPassword(passwords, pwd);

    // 2. The password encoded with the JVM default charset, upcast back to chars.
    try {
        addPassword(passwords, castBytesToChars(encodePassword(pwd, Charset.defaultCharset())));
    } catch (IOException ignored) {
        // best-effort variant — skip on encoding failure
    }

    // 3. Same trick with the console charset, when it differs from the default.
    if (!CONSOLE_CHARSET.equals(Charset.defaultCharset())) {
        try {
            addPassword(passwords, castBytesToChars(encodePassword(pwd, CONSOLE_CHARSET)));
        } catch (IOException ignored) {
            // best-effort variant — skip on encoding failure
        }
    }
}
/**
 * Appends {@code password} to {@code passwords} unless an identical char[] is
 * already present (content comparison, not reference).
 */
private static void addPassword(List<char[]> passwords, char[] password) {
    boolean alreadyPresent = false;
    for (int i = 0; i < passwords.size() && !alreadyPresent; i++) {
        alreadyPresent = Arrays.equals(passwords.get(i), password);
    }
    if (!alreadyPresent) {
        passwords.add(password);
    }
}
/**
 * Encodes {@code pwd} into bytes with {@code cs}. REPLACE actions make the
 * conversion total: malformed/unmappable input becomes replacement bytes
 * instead of raising an error.
 */
private static byte[] encodePassword(char[] pwd, Charset cs) throws IOException {
    ByteBuffer buf = cs.newEncoder()
            .onMalformedInput(CodingErrorAction.REPLACE)
            .onUnmappableCharacter(CodingErrorAction.REPLACE)
            .encode(CharBuffer.wrap(pwd));
    byte[] out = new byte[buf.remaining()];
    buf.get(out);
    return out;
}
/**
 * Decodes {@code pwdBytes} into chars with {@code encoding}. REPLACE actions
 * make the conversion total: malformed/unmappable input becomes replacement
 * characters instead of raising an error.
 */
private static char[] decodePassword(byte[] pwdBytes, Charset encoding) throws IOException {
    CharBuffer buf = encoding.newDecoder()
            .onMalformedInput(CodingErrorAction.REPLACE)
            .onUnmappableCharacter(CodingErrorAction.REPLACE)
            .decode(ByteBuffer.wrap(pwdBytes));
    char[] out = new char[buf.remaining()];
    buf.get(out);
    return out;
}
/**
 * Upcasts each {@code byte} in the provided array to a {@code char} and returns
 * the resulting array. Returns null for null input.
 */
private static char[] castBytesToChars(byte[] bytes) {
    if (bytes == null) {
        return null;
    }

    char[] chars = new char[bytes.length];
    int i = 0;
    for (byte b : bytes) {
        // mask keeps the value in 0..255 instead of sign-extending negatives
        chars[i++] = (char) (b & 0xff);
    }
    return chars;
}
/**
 * Returns the character encoding used by the console, falling back to the JVM
 * default charset whenever it cannot be determined.
 */
private static Charset getConsoleEncoding() {
    // IMPLEMENTATION NOTE: There is no public API for obtaining the console's
    // character encoding. We thus cheat by using implementation details of the
    // most popular JVMs (Console's private static encoding() method).
    String consoleCharsetName;
    try {
        Method encodingMethod = Console.class.getDeclaredMethod("encoding");
        encodingMethod.setAccessible(true);
        consoleCharsetName = (String) encodingMethod.invoke(null);
        if (consoleCharsetName == null) {
            // null from encoding() means the console uses the default charset
            return Charset.defaultCharset();
        }
    } catch (ReflectiveOperationException | RuntimeException e) {
        // BUG FIX: on JDK 9+ setAccessible(true) may throw
        // InaccessibleObjectException (a RuntimeException), which previously
        // escaped the ReflectiveOperationException catch and crashed the tool.
        // Treat any reflection failure the same way: warn and fall back.
        Charset defaultCharset = Charset.defaultCharset();
        System.err.println("warning: Failed to obtain console character encoding name. Assuming " + defaultCharset);
        return defaultCharset;
    }

    try {
        return Charset.forName(consoleCharsetName);
    } catch (IllegalArgumentException e) {
        // On Windows 10, cp65001 is the UTF-8 code page. For some reason, popular JVMs don't
        // have a mapping for cp65001...
        if ("cp65001".equals(consoleCharsetName)) {
            return StandardCharsets.UTF_8;
        }
        Charset defaultCharset = Charset.defaultCharset();
        System.err.println("warning: Console uses unknown character encoding: "
                + consoleCharsetName
                + ". Using "
                + defaultCharset
                + " instead");
        return defaultCharset;
    }
}
/**
 * Reads one line of raw password bytes from {@code in}, stopping at LF, CR, or
 * CRLF (line terminator not included) or at end of stream. A CR followed by a
 * byte other than LF is kept as part of the password, and the follower byte is
 * pushed back so the next read sees it.
 */
private static byte[] readEncodedPassword(InputStream in) throws IOException {
    ByteArrayOutputStream result = new ByteArrayOutputStream();
    int b;
    while ((b = in.read()) != -1) {
        if (b == '\n') {
            break;
        } else if (b == '\r') {
            int next = in.read();
            if ((next == -1) || (next == '\n')) {
                // CR at EOF or CRLF: terminates the line
                break;
            }
            // Lone CR mid-line: un-read the lookahead byte. Note the wrapped
            // stream is assigned back to `in`, so later iterations read from it.
            if (!(in instanceof PushbackInputStream)) {
                in = new PushbackInputStream(in);
            }
            ((PushbackInputStream) in).unread(next);
        }
        result.write(b);
    }
    return result.toByteArray();
}
/**
* Returns the passwords described by the provided spec. The reason there may be more than one
* password is compatibility with {@code keytool} and {@code jarsigner} which in certain cases
* use the form of passwords encoded using the console's character encoding.
*
* <p>Supported specs:
* <ul>
* <li><em>stdin</em> -- read password as a line from console, if available, or standard
* input if console is not available</li>
* <li><em>pass:password</em> -- password specified inside the spec, starting after
* {@code pass:}</li>
* <li><em>file:path</em> -- read password as a line from the specified file</li>
* <li><em>env:name</em> -- password is in the specified environment variable</li>
* </ul>
*
* <p>When the same file (including standard input) is used for providing multiple passwords,
* the passwords are read from the file one line at a time.
*/
public List<char[]> getPasswords(String spec, String description) throws IOException {
// IMPLEMENTATION NOTE: Java KeyStore and PBEKeySpec APIs take passwords as arrays of
// Unicode characters (char[]). Unfortunately, it appears that Sun/Oracle keytool and
// jarsigner in some cases use passwords which are the encoded form obtained using the
// console's character encoding. For example, if the encoding is UTF-8, keytool and
// jarsigner will use the password which is obtained by upcasting each byte of the UTF-8
// encoded form to char. This occurs only when the password is read from stdin/console, and
// does not occur when the password is read from a command-line parameter.
// There are other tools which use the Java KeyStore API correctly.
// Thus, for each password spec, there may be up to three passwords:
// * Unicode characters,
// * characters (upcast bytes) obtained from encoding the password using the console's
// character encoding,
// * characters (upcast bytes) obtained from encoding the password using the JVM's default
// character encoding.
//
// For a sample password "\u0061\u0062\u00a1\u00e4\u044e\u0031":
// On Windows 10 with English US as the UI language, IBM437 is used as console encoding and
// windows-1252 is used as the JVM default encoding:
// * keytool -genkey -v -keystore native.jks -keyalg RSA -keysize 2048 -validity 10000
// -alias test
// generates a keystore and key which decrypt only with
// "\u0061\u0062\u00ad\u0084\u003f\u0031"
// * keytool -genkey -v -keystore native.jks -keyalg RSA -keysize 2048 -validity 10000
// -alias test -storepass <pass here>
// generates a keystore and key which decrypt only with
// "\u0061\u0062\u00a1\u00e4\u003f\u0031"
// On modern OSX/Linux UTF-8 is used as the console and JVM default encoding:
// * keytool -genkey -v -keystore native.jks -keyalg RSA -keysize 2048 -validity 10000
// -alias test
// generates a keystore and key which decrypt only with
// "\u0061\u0062\u00c2\u00a1\u00c3\u00a4\u00d1\u008e\u0031"
// * keytool -genkey -v -keystore native.jks -keyalg RSA -keysize 2048 -validity 10000
// -alias test
// generates a keystore and key which decrypt only with
// "\u0061\u0062\u00a1\u00e4\u044e\u0031"
assertNotClosed();
if (spec.startsWith("pass:")) {
char[] pwd = spec.substring("pass:".length()).toCharArray();
return getPasswords(pwd);
} else if (SPEC_STDIN.equals(spec)) {
Console console = System.console();
if (console != null) {
// Reading from console
char[] pwd = console.readPassword(description + ": ");
if (pwd == null) {
throw new IOException("Failed to read " + description + ": console closed");
}
return getPasswords(pwd);
} else {
// Console not available -- reading from redirected input
System.out.println(description + ": ");
byte[] encodedPwd = readEncodedPassword(System.in);
if (encodedPwd.length == 0) {
throw new IOException("Failed to read " + description + ": standard input closed");
}
// By default, textual input obtained via standard input is supposed to be decoded
// using the in JVM default character encoding but we also try the console's
// encoding just in case.
return getPasswords(encodedPwd, Charset.defaultCharset(), CONSOLE_CHARSET);
}
} else if (spec.startsWith("file:")) {
String name = spec.substring("file:".length());
File file = new File(name).getCanonicalFile();
InputStream in = mFileInputStreams.get(file);
if (in == null) {
in = new FileInputStream(file);
mFileInputStreams.put(file, in);
}
byte[] encodedPwd = readEncodedPassword(in);
if (encodedPwd.length == 0) {
throw new IOException("Failed to read " + description + " : end of file reached in " + file);
}
// By default, textual input from files is supposed to be treated as encoded using JVM's
// default character encoding.
return getPasswords(encodedPwd, Charset.defaultCharset());
} else if (spec.startsWith("env:")) {
String name = spec.substring("env:".length());
String value = System.getenv(name);
if (value == null) {
throw new IOException("Failed to read " + description + ": environment variable " + value + " not specified");
}
return getPasswords(value.toCharArray());
} else {
throw new IOException("Unsupported password spec for " + description + ": " + spec);
}
}
private void assertNotClosed() {
if (mClosed) {
throw new IllegalStateException("Closed");
}
}
@Override
public void close() {
for (InputStream in : mFileInputStreams.values()) {
try {
in.close();
} catch (IOException ignored) {
}
}
mFileInputStreams.clear();
mClosed = true;
}
}
| |
/**
* Copyright (c) 2011, University of Konstanz, Distributed Systems Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the University of Konstanz nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.treetank.service.xml.xpath.comparators;
import java.util.HashMap;
import java.util.Map;
import org.treetank.data.Type;
import org.treetank.exception.TTXPathException;
import org.treetank.service.xml.xpath.EXPathError;
/**
* <h1>CompKind</h1>
* <p>
* Enumeration for all comparison kinds.
* </p>
*/
public enum CompKind {
    /** comparison type 'equal'. */
    EQ("eq", "=") {
        /**
         * {@inheritDoc}
         */
        @Override
        public boolean compare(final String mOperand1, final String mOperand2, final Type mType)
            throws TTXPathException {
            switch (mType) {
            case FLOAT:
                return (Float.parseFloat(mOperand1) == Float.parseFloat(mOperand2));
            case DECIMAL:
            case DOUBLE:
                return (Double.parseDouble(mOperand1) == Double.parseDouble(mOperand2));
            case INTEGER:
                // NOTE(review): integer operands are parsed as doubles and truncated to
                // int, so values outside the int range lose precision -- confirm intended.
                // return (Integer.getInteger(operand1) ==
                // Integer.getInteger(operand2));
                return ((int)Double.parseDouble(mOperand1) == (int)Double.parseDouble(mOperand2));
            case BOOLEAN:
                return (Boolean.parseBoolean(mOperand1) == Boolean.parseBoolean(mOperand2));
            case STRING:
            case ANY_URI:
                return mOperand1.compareTo(mOperand2) == 0;
            case DATE:
            case DATE_TIME:
            case TIME:
            case DURATION:
            case HEX_BINARY:
            case BASE_64_BINARY:
            case QNAME:
            case NOTATION:
            case G_DAY:
            case G_MONTH_DAY:
            case G_MONTH:
            case G_YEAR:
            case G_YEAR_MONTH:
                // Recognized XML Schema types without comparison support yet.
                throw new IllegalStateException("Not implemented for this type yet");
            default:
                // Any other type is a static type error per XPath (err:XPTY0004).
                throw EXPathError.XPTY0004.getEncapsulatedException();
            }
        }
    },
    /** comparison type 'not equal'. */
    NE("ne", "!=") {
        /**
         * {@inheritDoc}
         */
        @Override
        public boolean compare(final String mOperand1, final String mOperand2, final Type mType)
            throws TTXPathException {
            switch (mType) {
            case FLOAT:
                return (Float.parseFloat(mOperand1) != Float.parseFloat(mOperand2));
            case DECIMAL:
            case DOUBLE:
                return (Double.parseDouble(mOperand1) != Double.parseDouble(mOperand2));
            case INTEGER:
                return ((int)Double.parseDouble(mOperand1) != (int)Double.parseDouble(mOperand2));
            case BOOLEAN:
                return (Boolean.parseBoolean(mOperand1) != Boolean.parseBoolean(mOperand2));
            case STRING:
            case ANY_URI:
                return mOperand1.compareTo(mOperand2) != 0;
            case DATE:
            case DATE_TIME:
            case TIME:
            case DURATION:
            case HEX_BINARY:
            case BASE_64_BINARY:
            case QNAME:
            case NOTATION:
            case G_DAY:
            case G_MONTH_DAY:
            case G_MONTH:
            case G_YEAR:
            case G_YEAR_MONTH:
                throw new IllegalStateException("Not implemented for this type yet");
            default:
                throw EXPathError.XPTY0004.getEncapsulatedException();
            }
        }
    },
    /** comparison type 'less than'. */
    LT("lt", "<") {
        /**
         * {@inheritDoc}
         */
        @Override
        public boolean compare(final String mOperand1, final String mOperand2, final Type mType)
            throws TTXPathException {
            switch (mType) {
            case FLOAT:
                return (Float.parseFloat(mOperand1) < Float.parseFloat(mOperand2));
            case DECIMAL:
            case DOUBLE:
                return (Double.parseDouble(mOperand1) < Double.parseDouble(mOperand2));
            case INTEGER:
                return ((int)Double.parseDouble(mOperand1) < (int)Double.parseDouble(mOperand2));
            case BOOLEAN:
                // true, if operand1 == false and operand2 == true
                return (!Boolean.parseBoolean(mOperand1) && Boolean.parseBoolean(mOperand2));
            case STRING:
            case ANY_URI:
                return mOperand1.compareTo(mOperand2) < 0;
            case DATE:
            case DATE_TIME:
            case TIME:
            case YEAR_MONTH_DURATION:
            case DAY_TIME_DURATION:
                throw new IllegalStateException("Not implemented for this type yet");
            default:
                throw EXPathError.XPTY0004.getEncapsulatedException();
            }
        }
    },
    /** comparison type 'less or equal than'. */
    LE("le", "<=") {
        /**
         * {@inheritDoc}
         */
        @Override
        public boolean compare(final String mOperand1, final String mOperand2, final Type mType)
            throws TTXPathException {
            switch (mType) {
            case FLOAT:
                return (Float.parseFloat(mOperand1) <= Float.parseFloat(mOperand2));
            case DECIMAL:
            case DOUBLE:
                return (Double.parseDouble(mOperand1) <= Double.parseDouble(mOperand2));
            case INTEGER:
                return ((int)Double.parseDouble(mOperand1) <= (int)Double.parseDouble(mOperand2));
            case BOOLEAN:
                // false only when operand1 == true and operand2 == false
                return !Boolean.parseBoolean(mOperand1) || Boolean.parseBoolean(mOperand2);
            case STRING:
            case ANY_URI:
                return mOperand1.compareTo(mOperand2) < 1;
            case DATE:
            case DATE_TIME:
            case TIME:
            case YEAR_MONTH_DURATION:
            case DAY_TIME_DURATION:
                throw new IllegalStateException("Not implemented for this type yet");
            default:
                throw EXPathError.XPTY0004.getEncapsulatedException();
            }
        }
    },
    /** comparison type 'greater than'. */
    GT("gt", ">") {
        /**
         * {@inheritDoc}
         */
        @Override
        public boolean compare(final String mOperand1, final String mOperand2, final Type mType)
            throws TTXPathException {
            switch (mType) {
            case FLOAT:
                return (Float.parseFloat(mOperand1) > Float.parseFloat(mOperand2));
            case DECIMAL:
            case DOUBLE:
                return (Double.parseDouble(mOperand1) > Double.parseDouble(mOperand2));
            case INTEGER:
                return ((int)Double.parseDouble(mOperand1) > (int)Double.parseDouble(mOperand2));
            case BOOLEAN:
                // true, if operand1 == true and operand2 == false
                return (Boolean.parseBoolean(mOperand1) && !Boolean.parseBoolean(mOperand2));
            case STRING:
            case ANY_URI:
                return mOperand1.compareTo(mOperand2) > 0;
            case DATE:
            case DATE_TIME:
            case TIME:
            case YEAR_MONTH_DURATION:
            case DAY_TIME_DURATION:
                throw new IllegalStateException("Not implemented for this type yet");
            default:
                throw EXPathError.XPTY0004.getEncapsulatedException();
            }
        }
    },
    /** value comparison type 'greater or equal than'. */
    GE("ge", ">=") {
        /**
         * {@inheritDoc}
         */
        @Override
        public boolean compare(final String mOperand1, final String mOperand2, final Type mType)
            throws TTXPathException {
            switch (mType) {
            case FLOAT:
                return (Float.parseFloat(mOperand1) >= Float.parseFloat(mOperand2));
            case DECIMAL:
            case DOUBLE:
                return (Double.parseDouble(mOperand1) >= Double.parseDouble(mOperand2));
            case INTEGER:
                return ((int)Double.parseDouble(mOperand1) >= (int)Double.parseDouble(mOperand2));
            case BOOLEAN:
                // false only when operand1 == false and operand2 == true
                return (Boolean.parseBoolean(mOperand1) || !Boolean.parseBoolean(mOperand2));
            case STRING:
            case ANY_URI:
                return mOperand1.compareTo(mOperand2) > -1;
            case DATE:
            case DATE_TIME:
            case TIME:
            case YEAR_MONTH_DURATION:
            case DAY_TIME_DURATION:
                throw new IllegalStateException("Not implemented for this type yet");
            default:
                throw EXPathError.XPTY0004.getEncapsulatedException();
            }
        }
    },
    /** node comparison type. */
    FO(">>") {
        /**
         * {@inheritDoc}
         */
        @Override
        public boolean compare(final String mOperand1, final String mOperand2, final Type mType)
            throws TTXPathException {
            // Document-order comparisons cannot be evaluated on string operands.
            throw new IllegalStateException("Evaluation of node comparisons not possible");
        }
    },
    /** node comparison type . */
    PRE("<<") {
        /**
         * {@inheritDoc}
         */
        @Override
        public boolean compare(final String mOperand1, final String mOperand2, final Type mType)
            throws TTXPathException {
            // Document-order comparisons cannot be evaluated on string operands.
            throw new IllegalStateException("Evaluation of node comparisons not possible");
        }
    },
    /** node comparison type . */
    IS("is") {
        /**
         * {@inheritDoc}
         */
        @Override
        public boolean compare(final String mOperand1, final String mOperand2, final Type mType)
            throws TTXPathException {
            // NOTE(review): identity is decided by comparing the operands as truncated
            // integers -- presumably node keys; confirm against the callers.
            return (int)Double.parseDouble(mOperand1) == (int)Double.parseDouble(mOperand2);
        }
    };
    /** String representation of comp. */
    private final String[] mCompAsString;
    /** Private mapping for easy retrieval of enums. */
    private static final Map<String, CompKind> STRINGTOENUM = new HashMap<String, CompKind>();
    static {
        // Index every textual alias ("eq", "=", ...) so fromString() is O(1).
        for (final CompKind kind : values()) {
            for (final String compAsString : kind.mCompAsString) {
                STRINGTOENUM.put(compAsString, kind);
            }
        }
    }
    /**
     * Private Constructor.
     *
     * @param paramCompAsString
     *            String to be set.
     */
    private CompKind(final String... paramCompAsString) {
        mCompAsString = paramCompAsString;
    }
    /**
     * Compares the two input values.
     *
     * @param mOperand1
     *            string value of first comparison operand
     * @param mOperand2
     *            string value of second comparison operand
     * @param mType
     *            comparison type
     * @return result of the boolean comparison
     * @throws TTXPathException
     *             if anything weird happens while comparison.
     */
    public abstract boolean compare(final String mOperand1, final String mOperand2, final Type mType)
        throws TTXPathException;
    /**
     * Public method to easy retrieve the Function-Class for a name.
     *
     * @param paramName
     *            the name of the function to be retrieved.
     * @return the Function, or {@code null} if no comparison kind matches the name
     */
    public static CompKind fromString(final String paramName) {
        return STRINGTOENUM.get(paramName);
    }
}
| |
package eu.drus.jpa.unit.mongodb.ext;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItems;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import com.mongodb.AuthenticationMechanism;
import com.mongodb.MongoClientOptions;
import com.mongodb.MongoCredential;
import com.mongodb.ServerAddress;
import eu.drus.jpa.unit.mongodb.ext.DataNucleusConfiguration.ConfigurationFactoryImpl;
import eu.drus.jpa.unit.spi.PersistenceUnitDescriptor;
@RunWith(MockitoJUnitRunner.class)
public class DataNucleusConfigurationTest {

    @Mock
    private PersistenceUnitDescriptor descriptor;

    /**
     * Stubs the mocked descriptor with the given persistence-unit properties and
     * builds a {@link Configuration} via {@link ConfigurationFactoryImpl}.
     */
    private Configuration createConfiguration(final Map<String, Object> properties) {
        when(descriptor.getProperties()).thenReturn(properties);
        return new ConfigurationFactoryImpl().createConfiguration(descriptor);
    }

    @Test
    public void testHost() {
        // Connection URL format: mongodb:[{server}][/{dbName}][,{server2}[,{server3}]]
        final Map<String, Object> properties = new HashMap<>();
        properties.put("datanucleus.ConnectionURL",
                "mongodb:www.example.com/foo, www2.example.com:123,192.0.2.1, 192.0.2.2:123, 2001:db8::ff00:42:8329, [2001:db8::ff00:42:8329]:123 ");

        final Configuration configuration = createConfiguration(properties);

        // All six servers (host names, IPv4 and IPv6, with and without port) are parsed.
        assertThat(configuration, notNullValue());
        final List<ServerAddress> serverAddresses = configuration.getServerAddresses();
        assertThat(serverAddresses, notNullValue());
        assertThat(serverAddresses.size(), equalTo(6));
        assertThat(serverAddresses,
                hasItems(new ServerAddress("www.example.com"), new ServerAddress("www2.example.com", 123), new ServerAddress("192.0.2.1"),
                        new ServerAddress("192.0.2.2", 123), new ServerAddress("2001:db8::ff00:42:8329"),
                        new ServerAddress("[2001:db8::ff00:42:8329]", 123)));
    }

    @Test
    public void testDefaultHost() {
        // A URL without a server part falls back to the loopback address.
        final Map<String, Object> properties = new HashMap<>();
        properties.put("datanucleus.ConnectionURL", "mongodb:/foo");

        final Configuration configuration = createConfiguration(properties);

        assertThat(configuration, notNullValue());
        final List<ServerAddress> serverAddresses = configuration.getServerAddresses();
        assertThat(serverAddresses, notNullValue());
        assertThat(serverAddresses.size(), equalTo(1));
        assertThat(serverAddresses, hasItems(new ServerAddress("127.0.0.1")));
    }

    @Test
    public void testDatabaseName() {
        // The database name is the path segment after the server part.
        final Map<String, Object> properties = new HashMap<>();
        properties.put("datanucleus.ConnectionURL", "mongodb:192.0.2.2:123/foo");

        final Configuration configuration = createConfiguration(properties);

        assertThat(configuration, notNullValue());
        assertThat(configuration.getDatabaseName(), equalTo("foo"));
    }

    @Test
    public void testMongoCredentialsAreEmptyIfUsernameIsNotConfigured() {
        // A password without a user name must not produce any credentials.
        final Map<String, Object> properties = new HashMap<>();
        properties.put("datanucleus.ConnectionURL", "mongodb:/foo");
        properties.put("datanucleus.ConnectionPassword", "foo");

        final Configuration configuration = createConfiguration(properties);

        assertThat(configuration, notNullValue());
        final List<MongoCredential> credentials = configuration.getCredentials();
        assertThat(credentials, notNullValue());
        assertTrue(credentials.isEmpty());
    }

    @Test
    public void testMongoCredentials() {
        final Map<String, Object> properties = new HashMap<>();
        properties.put("datanucleus.ConnectionURL", "mongodb:/foo");
        properties.put("datanucleus.ConnectionUserName", "user");
        properties.put("datanucleus.ConnectionPassword", "pass");

        final Configuration configuration = createConfiguration(properties);

        assertThat(configuration, notNullValue());
        final List<MongoCredential> credentials = configuration.getCredentials();
        assertThat(credentials, notNullValue());
        assertThat(credentials.size(), equalTo(1));

        // User/password credentials default to PLAIN authentication against "admin".
        final MongoCredential mongoCredential = credentials.get(0);
        assertThat(mongoCredential, notNullValue());
        assertThat(mongoCredential.getUserName(), equalTo("user"));
        assertThat(mongoCredential.getPassword(), equalTo("pass".toCharArray()));
        assertThat(mongoCredential.getSource(), equalTo("admin"));
        assertThat(mongoCredential.getAuthenticationMechanism(), equalTo(AuthenticationMechanism.PLAIN));
    }

    @Test
    public void testMongoClientOptions() {
        // Of all MongoClientOptions, DataNucleus appears to honor only
        // connectionsPerHost and threadsAllowedToBlockForConnectionMultiplier
        // (other datanucleus.mongodb.* options -- timeouts, heartbeat settings,
        // SSL flags, replica set name, etc. -- are ignored), so only those two
        // are asserted here.
        final int connectionsPerHost = 2;
        final int threadsAllowedToBlockForConnectionMultiplier = 2;

        final Map<String, Object> properties = new HashMap<>();
        properties.put("datanucleus.ConnectionURL", "mongodb:/foo");
        properties.put("datanucleus.mongodb.connectionsPerHost", "" + connectionsPerHost);
        properties.put("datanucleus.mongodb.threadsAllowedToBlockForConnectionMultiplier",
                "" + threadsAllowedToBlockForConnectionMultiplier);

        final Configuration configuration = createConfiguration(properties);

        assertThat(configuration, notNullValue());
        final MongoClientOptions clientOptions = configuration.getClientOptions();
        assertThat(clientOptions, notNullValue());
        assertThat(clientOptions.getConnectionsPerHost(), equalTo(connectionsPerHost));
        assertThat(clientOptions.getThreadsAllowedToBlockForConnectionMultiplier(), equalTo(threadsAllowedToBlockForConnectionMultiplier));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.cache;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.primitives.Ints;
import com.google.common.primitives.UnsignedBytes;
import org.apache.druid.guice.annotations.PublicApi;
import org.apache.druid.java.util.common.Cacheable;
import org.apache.druid.java.util.common.StringUtils;
import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
* CacheKeyBuilder is a tool for easily generating cache keys of {@link Cacheable} objects.
*
* The layout of the serialized cache key is like below.
*
* +--------------------------------------------------------+
* | ID (1 byte) |
* | type key (1 byte) | serialized value (variable length) |
* | type key (1 byte) | serialized value (variable length) |
* | ... |
* +--------------------------------------------------------+
*
*/
@PublicApi
public class CacheKeyBuilder
{
static final byte BYTE_KEY = 0;
static final byte BYTE_ARRAY_KEY = 1;
static final byte BOOLEAN_KEY = 2;
static final byte INT_KEY = 3;
static final byte FLOAT_KEY = 4;
static final byte FLOAT_ARRAY_KEY = 5;
static final byte DOUBLE_KEY = 6;
static final byte STRING_KEY = 7;
static final byte STRING_LIST_KEY = 8;
static final byte CACHEABLE_KEY = 9;
static final byte CACHEABLE_LIST_KEY = 10;
static final byte DOUBLE_ARRAY_KEY = 11;
static final byte[] STRING_SEPARATOR = new byte[]{(byte) 0xFF};
static final byte[] EMPTY_BYTES = StringUtils.EMPTY_BYTES;
private static class Item
{
private final byte typeKey;
private final byte[] item;
Item(byte typeKey, byte[] item)
{
this.typeKey = typeKey;
this.item = item;
}
int byteSize()
{
return 1 + item.length;
}
}
private static byte[] floatArrayToByteArray(float[] input)
{
final ByteBuffer buffer = ByteBuffer.allocate(Float.BYTES * input.length);
buffer.asFloatBuffer().put(input);
return buffer.array();
}
private static byte[] doubleArrayToByteArray(double[] input)
{
final ByteBuffer buffer = ByteBuffer.allocate(Double.BYTES * input.length);
buffer.asDoubleBuffer().put(input);
return buffer.array();
}
private static byte[] cacheableToByteArray(@Nullable Cacheable cacheable)
{
if (cacheable == null) {
return EMPTY_BYTES;
} else {
final byte[] key = cacheable.getCacheKey();
Preconditions.checkArgument(!Arrays.equals(key, EMPTY_BYTES), "cache key is equal to the empty key");
return key;
}
}
private static byte[] stringCollectionToByteArray(Collection<String> input, boolean preserveOrder)
{
return collectionToByteArray(
input,
new Function<String, byte[]>()
{
@Override
public byte[] apply(@Nullable String input)
{
return StringUtils.toUtf8WithNullToEmpty(input);
}
},
STRING_SEPARATOR,
preserveOrder
);
}
private static byte[] cacheableCollectionToByteArray(Collection<? extends Cacheable> input, boolean preserveOrder)
{
return collectionToByteArray(
input,
new Function<Cacheable, byte[]>()
{
@Override
public byte[] apply(@Nullable Cacheable input)
{
return input == null ? EMPTY_BYTES : input.getCacheKey();
}
},
EMPTY_BYTES,
preserveOrder
);
}
private static <T> byte[] collectionToByteArray(
Collection<? extends T> collection,
Function<T, byte[]> serializeFunction,
byte[] separator,
boolean preserveOrder
)
{
if (collection.size() > 0) {
List<byte[]> byteArrayList = Lists.newArrayListWithCapacity(collection.size());
int totalByteLength = 0;
for (T eachItem : collection) {
final byte[] byteArray = serializeFunction.apply(eachItem);
totalByteLength += byteArray.length;
byteArrayList.add(byteArray);
}
if (!preserveOrder) {
// Sort the byte array list to guarantee that collections of same items but in different orders make the same result
Collections.sort(byteArrayList, UnsignedBytes.lexicographicalComparator());
}
final Iterator<byte[]> iterator = byteArrayList.iterator();
final int bufSize = Integer.BYTES + separator.length * (byteArrayList.size() - 1) + totalByteLength;
final ByteBuffer buffer = ByteBuffer.allocate(bufSize)
.putInt(byteArrayList.size())
.put(iterator.next());
while (iterator.hasNext()) {
buffer.put(separator).put(iterator.next());
}
return buffer.array();
} else {
return EMPTY_BYTES;
}
}
private final List<Item> items = new ArrayList<>();
private final byte id;
private int size;
public CacheKeyBuilder(byte id)
{
this.id = id;
this.size = 1;
}
public CacheKeyBuilder appendByte(byte input)
{
appendItem(BYTE_KEY, new byte[]{input});
return this;
}
public CacheKeyBuilder appendByteArray(byte[] input)
{
appendItem(BYTE_ARRAY_KEY, input);
return this;
}
public CacheKeyBuilder appendString(@Nullable String input)
{
appendItem(STRING_KEY, StringUtils.toUtf8WithNullToEmpty(input));
return this;
}
/**
* Add a collection of strings to the cache key.
* Strings in the collection are concatenated with a separator of '0xFF',
* and they appear in the cache key in their input order.
*
* @param input a collection of strings to be included in the cache key
* @return this instance
*/
public CacheKeyBuilder appendStrings(Collection<String> input)
{
appendItem(STRING_LIST_KEY, stringCollectionToByteArray(input, true));
return this;
}
/**
* Add a collection of strings to the cache key.
* Strings in the collection are sorted by their byte representation and
* concatenated with a separator of '0xFF'.
*
* @param input a collection of strings to be included in the cache key
* @return this instance
*/
public CacheKeyBuilder appendStringsIgnoringOrder(Collection<String> input)
{
appendItem(STRING_LIST_KEY, stringCollectionToByteArray(input, false));
return this;
}
public CacheKeyBuilder appendBoolean(boolean input)
{
appendItem(BOOLEAN_KEY, new byte[]{(byte) (input ? 1 : 0)});
return this;
}
public CacheKeyBuilder appendInt(int input)
{
appendItem(INT_KEY, Ints.toByteArray(input));
return this;
}
public CacheKeyBuilder appendFloat(float input)
{
appendItem(FLOAT_KEY, ByteBuffer.allocate(Float.BYTES).putFloat(input).array());
return this;
}
public CacheKeyBuilder appendDouble(double input)
{
appendItem(DOUBLE_KEY, ByteBuffer.allocate(Double.BYTES).putDouble(input).array());
return this;
}
public CacheKeyBuilder appendDoubleArray(double[] input)
{
appendItem(DOUBLE_ARRAY_KEY, doubleArrayToByteArray(input));
return this;
}
public CacheKeyBuilder appendFloatArray(float[] input)
{
appendItem(FLOAT_ARRAY_KEY, floatArrayToByteArray(input));
return this;
}
public CacheKeyBuilder appendCacheable(@Nullable Cacheable input)
{
appendItem(CACHEABLE_KEY, cacheableToByteArray(input));
return this;
}
/**
* Add a collection of Cacheables to the cache key.
* Cacheables in the collection are concatenated without any separator,
* and they appear in the cache key in their input order.
*
* @param input a collection of Cacheables to be included in the cache key
* @return this instance
*/
public CacheKeyBuilder appendCacheables(Collection<? extends Cacheable> input)
{
appendItem(CACHEABLE_LIST_KEY, cacheableCollectionToByteArray(input, true));
return this;
}
/**
 * Adds a collection of Cacheables to the cache key, independent of their
 * order.  The elements are sorted by their byte representation before being
 * concatenated without any separator.
 *
 * @param input a collection of Cacheables to be included in the cache key
 * @return this instance
 */
public CacheKeyBuilder appendCacheablesIgnoringOrder(Collection<? extends Cacheable> input)
{
    // ordered=false: the helper sorts the byte representations first.
    final byte[] encoded = cacheableCollectionToByteArray(input, false);
    appendItem(CACHEABLE_LIST_KEY, encoded);
    return this;
}
private void appendItem(byte typeKey, byte[] input)
{
    // Keep the running total in sync so build() can allocate exactly once.
    final Item entry = new Item(typeKey, input);
    size += entry.byteSize();
    items.add(entry);
}
public byte[] build()
{
    // Layout: the builder id, then each item's type tag followed by its payload.
    final ByteBuffer out = ByteBuffer.allocate(size);
    out.put(id);
    for (final Item entry : items) {
        out.put(entry.typeKey);
        out.put(entry.item);
    }
    return out.array();
}
}
| |
/*
* Copyright 2011 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.walkaround.slob.server;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.FetchOptions;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.datastore.Query;
import com.google.appengine.api.datastore.Query.FilterOperator;
import com.google.appengine.api.datastore.Query.SortDirection;
import com.google.appengine.api.datastore.Text;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.inject.assistedinject.Assisted;
import com.google.inject.assistedinject.AssistedInject;
import com.google.walkaround.slob.shared.ChangeData;
import com.google.walkaround.slob.shared.ChangeRejected;
import com.google.walkaround.slob.shared.ClientId;
import com.google.walkaround.slob.shared.InvalidSnapshot;
import com.google.walkaround.slob.shared.SlobId;
import com.google.walkaround.slob.shared.SlobModel;
import com.google.walkaround.slob.shared.SlobModel.ReadableSlob;
import com.google.walkaround.slob.shared.StateAndVersion;
import com.google.walkaround.util.server.RetryHelper.PermanentFailure;
import com.google.walkaround.util.server.RetryHelper.RetryableFailure;
import com.google.walkaround.util.server.appengine.CheckedDatastore.CheckedIterator;
import com.google.walkaround.util.server.appengine.CheckedDatastore.CheckedTransaction;
import com.google.walkaround.util.server.appengine.DatastoreUtil;
import com.google.walkaround.util.shared.Assert;
import java.util.List;
import java.util.logging.Logger;
import javax.annotation.Nullable;
/**
* Functionality for traversing and appending to the mutation log.
*
* Using only this class to mutate the log guarantees that no corruption will
* occur.
*
* XXX(danilatos): Not necessarily thread safe, should only be used in a
* single-threaded fashion but let's make it thread safe anyway just in case...
* (better yet, add assertions that it's used only from one thread, so that we
* notice when it's not)
*
* @author danilatos@google.com (Daniel Danilatos)
* @author ohler@google.com (Christian Ohler)
*/
public class MutationLog {
public interface MutationLogFactory {
// TODO(danilatos): Rename this method to "get" to avoid connotations of
// creating objects. Do this any time there's low chance of conflicts.
/** Returns a MutationLog for {@code objectId} whose reads and writes go through {@code tx}. */
MutationLog create(CheckedTransaction tx, SlobId objectId);
}
private static final Logger log = Logger.getLogger(MutationLog.class.getName());
// Datastore property names on delta entities: the serialized operation
// payload ("op") and the id of the client session that submitted it ("sid").
@VisibleForTesting static final String DELTA_OP_PROPERTY = "op";
@VisibleForTesting static final String DELTA_CLIENT_ID_PROPERTY = "sid";
// Datastore property on snapshot entities holding the serialized snapshot.
@VisibleForTesting static final String SNAPSHOT_DATA_PROPERTY = "Data";
private static final String METADATA_PROPERTY = "Metadata";
// Datastore does not allow ids to be 0.
// Hence a delta entity's numeric id is the version it applies to, plus 1;
// these two helpers convert between the representations.
private static long versionFromDeltaId(long id) {
return id - 1;
}
private static long deltaIdFromVersion(long version) {
return version + 1;
}
/**
 * Immutable value object: one log entry that takes {@code objectId} from
 * {@code version} to {@code version + 1}.
 */
private static final class DeltaEntry {
  private final SlobId objectId;
  private final long version;
  private final ChangeData<String> data;

  DeltaEntry(SlobId objectId, long version, ChangeData<String> data) {
    this.objectId = objectId;
    this.version = version;
    this.data = data;
  }

  /** Version the object is at after this delta is applied. */
  long getResultingVersion() {
    return version + 1;
  }

  @Override
  public String toString() {
    return String.format("DeltaEntry(%s, %s, %s)", objectId, version, data);
  }
}
/** Immutable value object: the serialized state of {@code objectId} at {@code version}. */
private static final class SnapshotEntry {
  private final SlobId objectId;
  private final long version;
  private final String snapshot;

  SnapshotEntry(SlobId objectId, long version, String snapshot) {
    this.objectId = objectId;
    this.version = version;
    this.snapshot = snapshot;
  }

  @Override
  public String toString() {
    return String.format("SnapshotEntry(%s, %s, %s)", objectId, version, snapshot);
  }
}
/** Builds the root (entity-group) key for {@code objectId}; the key name is the slob id. */
public Key makeRootEntityKey(SlobId objectId) {
  // Verify that the id-to-key mapping round-trips.
  Key rootKey = KeyFactory.createKey(entityGroupKind, objectId.getId());
  Assert.check(parseRootEntityKey(rootKey).equals(objectId), "Mismatch: %s, %s", objectId, rootKey);
  return rootKey;
}

/** Inverse of makeRootEntityKey(); rejects keys of any other kind. */
public SlobId parseRootEntityKey(Key key) {
  Preconditions.checkArgument(entityGroupKind.equals(key.getKind()),
      "Key doesn't have kind %s: %s", entityGroupKind, key);
  return new SlobId(key.getName());
}
// Key of the delta entity that takes the object from version to version + 1.
// A child of the slob's root entity, so all log entities for one slob share
// a single entity group; the numeric id encodes the version (see
// deltaIdFromVersion, which offsets by 1 because datastore forbids id 0).
private Key makeDeltaKey(SlobId objectId, long version) {
return KeyFactory.createKey(
makeRootEntityKey(objectId),
deltaEntityKind,
deltaIdFromVersion(version));
}
// Key of the snapshot entity holding the state at the given version.  The
// numeric id is the version itself (datastore forbids id 0, so no snapshot
// entity can exist for version 0).
private Key makeSnapshotKey(SlobId objectId, long version) {
return KeyFactory.createKey(
makeRootEntityKey(objectId),
snapshotEntityKind,
version);
}
// Convenience overloads taking the in-memory entry objects.
private Key makeDeltaKey(DeltaEntry e) {
return makeDeltaKey(e.objectId, e.version);
}
private Key makeSnapshotKey(SnapshotEntry e) {
return makeSnapshotKey(e.objectId, e.version);
}
/**
 * Estimates the serialized size of {@code key} in bytes, derived from the
 * length of its web-safe base-64 string representation.
 */
private static long estimateSizeBytes(Key key) {
// It's not documented whether and how the representation returned by
// keyToString() relates to the representation that the API limits are based
// on; but in any case, it should be good enough for our estimate.
long web64Size = KeyFactory.keyToString(key).length();
// Base-64 strings have 6 bits per character, thus the ratio is 6/8 or 3/4.
// Divide by 4 first to avoid overflow. The base-64 string should be padded
// to a length that is a multiple of 4, so there is no rounding error. (If
// it's not padded, our estimate will be off; but that is tolerable, too.)
return (web64Size / 4) * 3;
}
// Rough size of a delta entity: key plus property names plus payloads.
// Counts UTF-16 chars as one byte each; see estimatedBytesStaged() caveats.
private long estimateSizeBytes(DeltaEntry deltaEntry) {
return estimateSizeBytes(makeDeltaKey(deltaEntry))
+ DELTA_CLIENT_ID_PROPERTY.length() + deltaEntry.data.getClientId().getId().length()
+ DELTA_OP_PROPERTY.length() + deltaEntry.data.getPayload().length();
}
// Rough size of a snapshot entity: key plus property name plus payload.
private long estimateSizeBytes(SnapshotEntry snapshotEntry) {
return estimateSizeBytes(makeSnapshotKey(snapshotEntry))
+ SNAPSHOT_DATA_PROPERTY.length() + snapshotEntry.snapshot.length();
}
/** Strategy for decoding a delta entity read from the datastore into a {@link ChangeData}. */
public interface DeltaEntityConverter {
ChangeData<String> convert(Entity entity);
}
/** Default converter; reads the properties written by populateDeltaEntity(). */
public static class DefaultDeltaEntityConverter implements DeltaEntityConverter {
@Override public ChangeData<String> convert(Entity entity) {
return new ChangeData<String>(
new ClientId(
DatastoreUtil.getExistingProperty(entity, DELTA_CLIENT_ID_PROPERTY, String.class)),
DatastoreUtil.getExistingProperty(entity, DELTA_OP_PROPERTY, Text.class).getValue());
}
}
// Decodes a delta entity: the parent key names the slob, the entity id
// encodes the version, and the converter decodes the payload properties.
private DeltaEntry parseDelta(Entity entity) {
SlobId slobId = new SlobId(entity.getKey().getParent().getName());
long version = versionFromDeltaId(entity.getKey().getId());
return new DeltaEntry(slobId, version, deltaEntityConverter.convert(entity));
}
// Writes the delta payload properties (client id and op) as unindexed
// properties; the object id and version live in the entity's key.
private static void populateDeltaEntity(DeltaEntry in, Entity out) {
DatastoreUtil.setNonNullUnindexedProperty(out, DELTA_CLIENT_ID_PROPERTY,
in.data.getClientId().getId());
DatastoreUtil.setNonNullUnindexedProperty(out, DELTA_OP_PROPERTY,
new Text(in.data.getPayload()));
}
// Decodes a snapshot entity; the entity id is the snapshotted version.
private static SnapshotEntry parseSnapshot(Entity e) {
SlobId id = new SlobId(e.getKey().getParent().getName());
long version = e.getKey().getId();
return new SnapshotEntry(id, version,
DatastoreUtil.getExistingProperty(e, SNAPSHOT_DATA_PROPERTY, Text.class).getValue());
}
// Writes the snapshot payload; the key carries the object id and version.
private static void populateSnapshotEntity(SnapshotEntry in, Entity out) {
DatastoreUtil.setNonNullUnindexedProperty(out, SNAPSHOT_DATA_PROPERTY, new Text(in.snapshot));
}
/**
 * An iterator over a datastore delta result list.
 *
 * Can be forward or reverse.
 *
 * The peek methods should be used in conjunction with
 * {@link DeltaIterator#hasNext()} since they will throw
 * {@code NoSuchElementException} if the end of the sequence is reached.
 */
public class DeltaIterator {
  // Underlying datastore query result iterator.
  private final CheckedIterator it;
  // True iff results arrive in ascending version order.
  private final boolean forward;
  // Resulting version of the most recently consumed delta, or -1 before the
  // first one; lets checkVersion() assert that the log has no gaps.  (-1 can
  // never collide with a real value since resulting versions are >= 1.)
  private long previousResultingVersion;
  // One-entry lookahead buffer filled by peekEntry(), drained by nextEntry().
  @Nullable private DeltaEntry peeked = null;

  public DeltaIterator(CheckedIterator it, boolean forward) {
    this.it = Preconditions.checkNotNull(it, "Null it");
    this.forward = forward;
    previousResultingVersion = -1;
  }

  public boolean hasNext() throws PermanentFailure, RetryableFailure {
    return peeked != null || it.hasNext();
  }

  /** Returns the next delta without consuming it. */
  DeltaEntry peekEntry() throws PermanentFailure, RetryableFailure {
    if (peeked == null) {
      // Let it.next() throw if there is no next.
      peeked = parseDelta(it.next());
      checkVersion(peeked);
    }
    return peeked;
  }

  /** Consumes and returns the next delta. */
  DeltaEntry nextEntry() throws PermanentFailure, RetryableFailure {
    DeltaEntry result = peekEntry();
    peeked = null;
    // Record the consumed delta's resulting version so checkVersion() can
    // verify contiguity of the next one.  (Previously this field was never
    // updated after construction, which made the assertion a no-op.)
    previousResultingVersion = result.getResultingVersion();
    return result;
  }

  public ChangeData<String> peek() throws PermanentFailure, RetryableFailure {
    return peekEntry().data;
  }

  public ChangeData<String> next() throws PermanentFailure, RetryableFailure {
    return nextEntry().data;
  }

  public boolean isForward() {
    return forward;
  }

  // Asserts that versions advance by exactly +1 (forward) or -1 (reverse)
  // relative to the previously consumed delta, i.e. the log has no gaps.
  private void checkVersion(DeltaEntry delta) {
    if (previousResultingVersion != -1) {
      long expectedResultingVersion = previousResultingVersion + (forward ? 1 : -1);
      Assert.check(delta.getResultingVersion() == expectedResultingVersion,
          "%s: Expected version %s, got %s",
          this, expectedResultingVersion, delta.getResultingVersion());
    }
  }

  @Override public String toString() {
    return "DeltaIterator(" + (forward ? "forward" : "reverse")
        + ", " + previousResultingVersion + ")";
  }
}
/**
 * Extends the log by additional deltas. Automatically takes snapshots as
 * needed.
 *
 * Deltas and snapshots are staged in memory and added to the underlying
 * transaction only when {@link Appender#flush()} is called.
 */
public class Appender {
// Model state with all staged deltas already applied.
private final StateAndVersion state;
// Entries accumulated by append() that have not yet been flushed.
private final List<DeltaEntry> stagedDeltaEntries = Lists.newArrayList();
private final List<SnapshotEntry> stagedSnapshotEntries = Lists.newArrayList();
// Estimated total size of all staged entries; reset to 0 by flush().
private long estimatedBytesStaged = 0;
// Inputs to the snapshot heuristic in append(): size of the most recent
// snapshot and the delta bytes accumulated since it was taken.
private long mostRecentSnapshotBytes;
private long totalDeltaBytesSinceSnapshot;
private Appender(StateAndVersion state,
long mostRecentSnapshotBytes,
long totalDeltaBytesSinceSnapshot) {
this.state = state;
this.mostRecentSnapshotBytes = mostRecentSnapshotBytes;
this.totalDeltaBytesSinceSnapshot = totalDeltaBytesSinceSnapshot;
}
/**
 * Stages a delta for writing, verifying that it is valid (applies cleanly).
 */
public void append(ChangeData<String> delta) throws ChangeRejected {
long oldVersion = state.getVersion();
// Applying the delta up front both validates it (throws ChangeRejected)
// and advances the in-memory state.
state.apply(delta);
DeltaEntry deltaEntry = new DeltaEntry(objectId, oldVersion,
new ChangeData<String>(delta.getClientId(), delta.getPayload()));
stagedDeltaEntries.add(deltaEntry);
long thisDeltaBytes = estimateSizeBytes(deltaEntry);
estimatedBytesStaged += thisDeltaBytes;
totalDeltaBytesSinceSnapshot += thisDeltaBytes;
// TODO(ohler): Avoid computing the snapshot every time since this is
// costly. Add a size estimation to slob instead. We need this anyway to
// implement size limits.
SnapshotEntry snapshotEntry = new SnapshotEntry(
objectId, state.getVersion(), state.getState().snapshot());
long snapshotBytes = estimateSizeBytes(snapshotEntry);
log.info("Object now at version " + state.getVersion() + "; snapshotBytes=" + snapshotBytes
+ ", mostRecentSnapshotBytes=" + mostRecentSnapshotBytes
+ ", totalDeltaBytesSinceSnapshot=" + totalDeltaBytesSinceSnapshot);
// To reconstruct the object's snapshot S at the current version, we will
// need to read the most recent snapshot P followed by a sequence of
// deltas D. To keep the amount of data required for this reconstruction
// within a constant factor of |S| (the size of S), we write S to disk if
// k * |S| < |P| + |D|, for some constant k.
//
// TODO(ohler): Provide bound on disk space consumption.
//
// TODO(ohler): This formula assumes that reading & reconstructing a
// snapshot has the same cost per byte as reading & applying a delta.
// That's probably not true. The cost of applying a delta may not even be
// linear in the size of that delta (and the same is true for
// reconstructing from a snapshot); this depends on the model. We should
// allow for models to influence when to take snapshots, perhaps by
// letting the model provide a size metric for deltas and snapshots
// instead of using bytes, or by measuring actual computation time if we
// can do that reliably. It would be best to make it impossible for
// models to cause quadratic disk space consumption, though.
final long k = 2;
if (k * snapshotBytes < mostRecentSnapshotBytes + totalDeltaBytesSinceSnapshot) {
log.info("Adding snapshot");
stagedSnapshotEntries.add(snapshotEntry);
mostRecentSnapshotBytes = snapshotBytes;
totalDeltaBytesSinceSnapshot = 0;
estimatedBytesStaged += snapshotBytes;
}
}
/**
 * A rough estimate of the total bytes currently staged to be written. This
 * may be subject to inaccuracies such as counting Java's UTF-16 characters
 * in strings as one byte each (actual encoding that the API limits are
 * based on is probably UTF-8) and only counting raw payloads without taking
 * metadata, encoding overhead, or indexing overhead into account.
 */
public long estimatedBytesStaged() {
return estimatedBytesStaged;
}
/** The version the object will be at once the staged deltas are written. */
public long getStagedVersion() {
return state.getVersion();
}
// If having ReadableSlob in addition to Slob adds too much complexity to
// SlobModel implementations, we could replace flush() with
// closeAndGetStagedState(), which would hand over ownership of the mutable
// Slob to the caller.
public ReadableSlob getStagedState() {
return state.getState();
}
/** True if append() has staged at least one delta that is not yet flushed. */
public boolean hasNewDeltas() {
return !stagedDeltaEntries.isEmpty();
}
/**
 * Returns the index data of the model at head state (including staged mutations).
 */
// TODO(ohler): Add a generic task queue hook to MutationLog and
// SlobStore, and use that to move indexing into an asynchronous task
// queue task. That would make indexing reliable and avoid polluting this
// API with SlobManager concerns like indexing.
public String getIndexedHtml() {
return state.getState().getIndexedContent();
}
/**
 * Calls {@code put()} on all staged deltas and snapshots.
 */
public void flush() throws PermanentFailure, RetryableFailure {
log.info("Flushing " + stagedDeltaEntries.size() + " deltas and "
+ stagedSnapshotEntries.size() + " snapshots");
put(tx, stagedDeltaEntries, stagedSnapshotEntries);
stagedDeltaEntries.clear();
stagedSnapshotEntries.clear();
estimatedBytesStaged = 0;
}
}
// Datastore entity kinds for the root (entity-group) entity and for the
// delta and snapshot entities, injected per slob type.
private final String entityGroupKind;
private final String deltaEntityKind;
private final String snapshotEntityKind;
// Decodes delta entities read from the datastore.
private final DeltaEntityConverter deltaEntityConverter;
// All datastore reads and writes go through this transaction.
private final CheckedTransaction tx;
// The slob whose mutation log this instance manipulates.
private final SlobId objectId;
// Creates model state from snapshots and applies deltas to it.
private final SlobModel model;
@AssistedInject
public MutationLog(@SlobRootEntityKind String entityGroupKind,
@SlobDeltaEntityKind String deltaEntityKind,
@SlobSnapshotEntityKind String snapshotEntityKind,
DeltaEntityConverter deltaEntityConverter,
@Assisted CheckedTransaction tx, @Assisted SlobId objectId,
SlobModel model) {
this.entityGroupKind = entityGroupKind;
this.deltaEntityKind = deltaEntityKind;
this.snapshotEntityKind = snapshotEntityKind;
this.deltaEntityConverter = deltaEntityConverter;
this.tx = Preconditions.checkNotNull(tx, "Null tx");
this.objectId = Preconditions.checkNotNull(objectId, "Null objectId");
this.model = Preconditions.checkNotNull(model, "Null model");
}
/** @see #forwardHistory(long, Long, FetchOptions) */
// Convenience overload using default fetch options.
public DeltaIterator forwardHistory(long minVersion, @Nullable Long maxVersion)
throws PermanentFailure, RetryableFailure {
return forwardHistory(minVersion, maxVersion, FetchOptions.Builder.withDefaults());
}
/**
 * Returns an iterator over the specified version range of the mutation log,
 * in a forwards direction.
 *
 * @param maxVersion null to end with the final delta in the mutation log
 * (inclusive).
 */
public DeltaIterator forwardHistory(long minVersion, @Nullable Long maxVersion,
FetchOptions fetchOptions) throws PermanentFailure, RetryableFailure {
return getDeltaIterator(minVersion, maxVersion, fetchOptions, true);
}
/** @see #reverseHistory(long, Long, FetchOptions) */
// Convenience overload using default fetch options.
public DeltaIterator reverseHistory(long minVersion, @Nullable Long maxVersion)
throws PermanentFailure, RetryableFailure {
return reverseHistory(minVersion, maxVersion, FetchOptions.Builder.withDefaults());
}
/**
 * Returns an iterator over the specified version range of the mutation log,
 * in a backwards direction.
 *
 * @param maxVersion null to begin with the final delta in the mutation log.
 */
public DeltaIterator reverseHistory(long minVersion, @Nullable Long maxVersion,
FetchOptions fetchOptions) throws PermanentFailure, RetryableFailure {
return getDeltaIterator(minVersion, maxVersion, fetchOptions, false);
}
/**
 * Returns the current version of the object (0 if the log has no deltas).
 */
public long getVersion() throws PermanentFailure, RetryableFailure {
  // The newest delta's resulting version is the current version; one result
  // is enough, hence the fetch limit of 1.
  DeltaIterator newestFirst = reverseHistory(0, null, FetchOptions.Builder.withLimit(1));
  return newestFirst.hasNext() ? newestFirst.nextEntry().getResultingVersion() : 0;
}
/**
 * Tuple of values returned by {@link #prepareAppender()}.
 */
public static class AppenderAndCachedDeltas {
  private final Appender appender;
  private final List<ChangeData<String>> reverseDeltasRead;
  private final DeltaIterator reverseDeltaIterator;

  public AppenderAndCachedDeltas(Appender appender,
      List<ChangeData<String>> reverseDeltasRead,
      DeltaIterator reverseDeltaIterator) {
    // checkNotNull returns its argument, so validate and assign in one step.
    this.appender = Preconditions.checkNotNull(appender, "Null appender");
    this.reverseDeltasRead = Preconditions.checkNotNull(reverseDeltasRead, "Null reverseDeltasRead");
    this.reverseDeltaIterator =
        Preconditions.checkNotNull(reverseDeltaIterator, "Null reverseDeltaIterator");
  }

  public Appender getAppender() {
    return appender;
  }

  public List<ChangeData<String>> getReverseDeltasRead() {
    return reverseDeltasRead;
  }

  public DeltaIterator getReverseDeltaIterator() {
    return reverseDeltaIterator;
  }

  @Override public String toString() {
    return "AppenderAndCachedDeltas("
        + appender + ", "
        + reverseDeltasRead + ", "
        + reverseDeltaIterator
        + ")";
  }
}
// Sanity check before appending: asserts that no delta entity already
// exists at the given version.
private void checkDeltaDoesNotExist(long version) throws RetryableFailure, PermanentFailure {
// This check is not necessary but let's be paranoid.
// TODO(danilatos): Make this async and check the result on flush() to
// improve latency. Or, make an informed decision to remove it.
Entity existing = tx.get(makeDeltaKey(objectId, version));
Assert.check(existing == null,
"Datastore fail? Found unexpected delta: %s, %s, %s",
objectId, version, existing);
}
/**
 * Creates an {@link Appender} for this mutation log and returns it together
 * with some by-products. The by-products can be useful to callers who need
 * data from the datastore that overlaps with what was needed to create the
 * {@code Appender}, to avoid redundant datastore reads.
 */
public AppenderAndCachedDeltas prepareAppender() throws PermanentFailure, RetryableFailure {
  // Walk the log backwards from the head; the first entry (if any) tells us
  // the current version.
  DeltaIterator deltaIterator = getDeltaIterator(
      0, null, FetchOptions.Builder.withDefaults(), false);
  if (!deltaIterator.hasNext()) {
    // Empty log: the object is at version 0 with no snapshot.
    log.info("Prepared appender at version 0");
    checkDeltaDoesNotExist(0);
    return new AppenderAndCachedDeltas(
        new Appender(createObject(null), 0, 0),
        ImmutableList.<ChangeData<String>>of(), deltaIterator);
  } else {
    SnapshotEntry snapshotEntry = getSnapshotEntryAtOrBefore(null);
    StateAndVersion state = createObject(snapshotEntry);
    long snapshotVersion = state.getVersion();
    long snapshotBytes = snapshotEntry == null ? 0 : estimateSizeBytes(snapshotEntry);
    // Read deltas between snapshot and current version. Since we determine
    // the current version by reading the first delta (in our reverse
    // iterator), we always read at least one delta even if none are needed to
    // reconstruct the current version.
    DeltaEntry finalDelta = deltaIterator.nextEntry();
    long currentVersion = finalDelta.getResultingVersion();
    if (currentVersion == snapshotVersion) {
      // We read a delta but it precedes the snapshot. It still has to go
      // into deltasRead in our AppenderAndCachedDeltas to ensure that there
      // is no gap between deltasRead and reverseIterator.
      log.info("Prepared appender; snapshotVersion=currentVersion=" + currentVersion);
      checkDeltaDoesNotExist(snapshotVersion);
      return new AppenderAndCachedDeltas(
          new Appender(state, snapshotBytes, 0),
          ImmutableList.of(finalDelta.data), deltaIterator);
    } else {
      // We need to apply the delta, and perhaps others. Collect them.
      ImmutableList.Builder<ChangeData<String>> deltaAccu = ImmutableList.builder();
      deltaAccu.add(finalDelta.data);
      long totalDeltaBytesSinceSnapshot = estimateSizeBytes(finalDelta);
      {
        DeltaEntry delta = finalDelta;
        while (delta.version != snapshotVersion) {
          delta = deltaIterator.nextEntry();
          deltaAccu.add(delta.data);
          totalDeltaBytesSinceSnapshot += estimateSizeBytes(delta);
        }
      }
      ImmutableList<ChangeData<String>> reverseDeltas = deltaAccu.build();
      // Now iterate forward and apply the deltas.
      for (ChangeData<String> delta : Lists.reverse(reverseDeltas)) {
        try {
          state.apply(delta);
        } catch (ChangeRejected e) {
          // Pass e as the cause so the underlying rejection is diagnosable
          // (matches reconstruct(), which also preserves the cause).
          throw new RuntimeException("Corrupt snapshot or delta history: "
              + objectId + " rejected delta " + delta + ": " + state, e);
        }
      }
      log.info("Prepared appender; snapshotVersion=" + snapshotVersion
          + ", " + reverseDeltas.size() + " deltas");
      checkDeltaDoesNotExist(state.getVersion());
      return new AppenderAndCachedDeltas(
          new Appender(state, snapshotBytes, totalDeltaBytesSinceSnapshot),
          reverseDeltas, deltaIterator);
    }
  }
}
// Builds a key-range ancestor query over delta entities.  Delta keys encode
// version + 1, so the >= filter on startVersion's key and the < filter on
// endVersion's key select deltas with startVersion <= version < endVersion.
private DeltaIterator getDeltaIterator(long startVersion, @Nullable Long endVersion,
FetchOptions fetchOptions, boolean forward) throws PermanentFailure, RetryableFailure {
checkRange(startVersion, endVersion);
// Empty range: skip the datastore round trip entirely.
if (endVersion != null && startVersion == endVersion) {
return new DeltaIterator(CheckedIterator.EMPTY, forward);
}
Query q = new Query(deltaEntityKind)
.setAncestor(makeRootEntityKey(objectId))
.addFilter(Entity.KEY_RESERVED_PROPERTY,
FilterOperator.GREATER_THAN_OR_EQUAL, makeDeltaKey(objectId, startVersion))
.addSort(Entity.KEY_RESERVED_PROPERTY,
forward ? SortDirection.ASCENDING : SortDirection.DESCENDING);
if (endVersion != null) {
q.addFilter(Entity.KEY_RESERVED_PROPERTY,
FilterOperator.LESS_THAN, makeDeltaKey(objectId, endVersion));
}
CheckedIterator result = tx.prepare(q).asIterator(fetchOptions);
return new DeltaIterator(result, forward);
}
/**
 * Reconstructs the object at the specified version (current version if null).
 */
public StateAndVersion reconstruct(@Nullable Long atVersion)
throws PermanentFailure, RetryableFailure {
checkRange(atVersion, null);
// Start from the newest snapshot at or before the requested version...
StateAndVersion state = getSnapshottedState(atVersion);
long startVersion = state.getVersion();
Assert.check(atVersion == null || startVersion <= atVersion);
// ...then roll forward by applying the deltas in between.
DeltaIterator it = forwardHistory(startVersion, atVersion);
while (it.hasNext()) {
ChangeData<String> delta = it.next();
try {
state.apply(delta);
} catch (ChangeRejected e) {
// A stored delta must always apply cleanly; a rejection means the
// persisted history is corrupt, which retrying cannot fix.
throw new PermanentFailure(
"Corrupt snapshot or delta history " + objectId + " @" + state.getVersion(), e);
}
}
// A specific version was requested but the log ends before reaching it.
if (atVersion != null && state.getVersion() < atVersion) {
throw new RuntimeException("Object max version is " + state.getVersion()
+ ", requested " + atVersion);
}
log.info("Reconstructed requested version " + atVersion
+ " from snapshot at " + startVersion
+ " followed by " + (state.getVersion() - startVersion) + " deltas");
return state;
}
/**
 * Stages all of the given deltas and snapshots into {@code tx}, verifying
 * that every entity parses back cleanly before it is written.
 */
private void put(CheckedTransaction tx,
    List<DeltaEntry> newDeltas, List<SnapshotEntry> newSnapshots)
    throws PermanentFailure, RetryableFailure {
  // Fixed: the message previously said "null newEntries", which is not the
  // name of any parameter; also matches the "Null x" style used elsewhere.
  Preconditions.checkNotNull(newDeltas, "Null newDeltas");
  Preconditions.checkNotNull(newSnapshots, "Null newSnapshots");
  List<Entity> entities = Lists.newArrayListWithCapacity(newDeltas.size() + newSnapshots.size());
  for (DeltaEntry entry : newDeltas) {
    Key key = makeDeltaKey(entry);
    Entity newEntity = new Entity(key);
    populateDeltaEntity(entry, newEntity);
    parseDelta(newEntity); // Verify it parses with no exceptions.
    entities.add(newEntity);
  }
  for (SnapshotEntry entry : newSnapshots) {
    Key key = makeSnapshotKey(entry);
    Entity newEntity = new Entity(key);
    populateSnapshotEntity(entry, newEntity);
    parseSnapshot(newEntity); // Verify it parses with no exceptions.
    entities.add(newEntity);
  }
  tx.put(entities);
}
/**
 * Returns the snapshot entry with the highest version less than or equal to
 * {@code atOrBeforeVersion} (unbounded if null), or null if no snapshot has
 * been written yet.  (Annotated @Nullable to match the null return below and
 * the style of getMetadata(); callers such as createObject() accept null.)
 */
@Nullable private SnapshotEntry getSnapshotEntryAtOrBefore(@Nullable Long atOrBeforeVersion)
    throws RetryableFailure, PermanentFailure {
  Query q = new Query(snapshotEntityKind)
      .setAncestor(makeRootEntityKey(objectId))
      .addSort(Entity.KEY_RESERVED_PROPERTY, SortDirection.DESCENDING);
  if (atOrBeforeVersion != null) {
    q = q.addFilter(Entity.KEY_RESERVED_PROPERTY, FilterOperator.LESS_THAN_OR_EQUAL,
        makeSnapshotKey(objectId, atOrBeforeVersion));
  }
  Entity e = tx.prepare(q).getFirstResult();
  log.info("query " + q + " returned first result " + e);
  return e == null ? null : parseSnapshot(e);
}
// Instantiates model state from a snapshot entry; a null entry yields
// model.create(null) at version 0 (presumably the model's empty initial
// state -- confirm against SlobModel.create).  InvalidSnapshot is rethrown
// unchecked because a stored snapshot that fails to parse is data
// corruption, not a caller error.
private StateAndVersion createObject(@Nullable SnapshotEntry entry) {
String snapshot = entry == null ? null : entry.snapshot;
long version = entry == null ? 0 : entry.version;
try {
return new StateAndVersion(model.create(snapshot), version);
} catch (InvalidSnapshot e) {
throw new RuntimeException("Could not create model from snapshot at version " + version
+ ": " + snapshot, e);
}
}
/**
 * Constructs a model object from the snapshot with the highest version less
 * than or equal to atOrBeforeVersion.
 *
 * @param atOrBeforeVersion null for current version
 * @return the snapshotted state and its version; if no snapshot exists,
 * the state created from a null snapshot at version 0
 */
public StateAndVersion getSnapshottedState(@Nullable Long atOrBeforeVersion)
throws PermanentFailure, RetryableFailure {
return createObject(getSnapshotEntryAtOrBefore(atOrBeforeVersion));
}
// Validates a version range: either both bounds are null ("everything"),
// or 0 <= startVersion <= endVersion, where a null endVersion means
// unbounded.  Also rejects spans wider than Integer.MAX_VALUE.
private void checkRange(@Nullable Long startVersion, @Nullable Long endVersion) {
if (startVersion == null) {
Preconditions.checkArgument(endVersion == null,
"startVersion == null implies endVersion == null, not %s", endVersion);
} else {
Preconditions.checkArgument(startVersion >= 0 &&
(endVersion == null || startVersion <= endVersion),
"Invalid range requested (%s to %s)", startVersion, endVersion);
// I doubt this would really happen, but...
Assert.check(endVersion == null || (endVersion - startVersion <= Integer.MAX_VALUE),
"Range too large: %s to %s", startVersion, endVersion);
}
}
// TODO(ohler): eliminate; PreCommitHook should be enough
/** Reads the metadata property from the slob's root entity; null if the entity is absent. */
@Nullable public String getMetadata() throws RetryableFailure, PermanentFailure {
Key key = makeRootEntityKey(objectId);
log.info("Looking up metadata " + key);
Entity result = tx.get(key);
log.info("Got " + result);
return result == null ? null
: DatastoreUtil.getExistingProperty(result, METADATA_PROPERTY, Text.class).getValue();
}
// TODO(ohler): eliminate; PreCommitHook should be enough
/** Writes the slob's root entity with the given metadata (replacing any existing entity). */
public void putMetadata(String metadata) throws RetryableFailure, PermanentFailure {
Key key = makeRootEntityKey(objectId);
Entity e = new Entity(key);
DatastoreUtil.setNonNullUnindexedProperty(e, METADATA_PROPERTY, new Text(metadata));
log.info("Writing metadata: " + e);
tx.put(e);
}
}
| |
/*
* Abora-Gold
* Part of the Abora hypertext project: http://www.abora.org
* Copyright 2003, 2005 David G Jones
*
* Translated from Udanax-Gold source code: http://www.udanax.com
* Copyright 1979-1999 Udanax.com. All rights reserved
*/
package info.dgjones.abora.gold.collection.tables;
import info.dgjones.abora.gold.collection.steppers.ArrayAccumulator;
import info.dgjones.abora.gold.collection.steppers.Stepper;
import info.dgjones.abora.gold.collection.steppers.TableAccumulator;
import info.dgjones.abora.gold.collection.steppers.TableStepper;
import info.dgjones.abora.gold.collection.tables.ActualArray;
import info.dgjones.abora.gold.collection.tables.IntegerTable;
import info.dgjones.abora.gold.collection.tables.MuArray;
import info.dgjones.abora.gold.collection.tables.OffsetScruArray;
import info.dgjones.abora.gold.collection.tables.ScruTable;
import info.dgjones.abora.gold.java.AboraSupport;
import info.dgjones.abora.gold.java.exception.AboraRuntimeException;
import info.dgjones.abora.gold.java.exception.SubclassResponsibilityException;
import info.dgjones.abora.gold.java.missing.Signal;
import info.dgjones.abora.gold.java.missing.smalltalk.Set;
import info.dgjones.abora.gold.spaces.basic.CoordinateSpace;
import info.dgjones.abora.gold.spaces.basic.Dsp;
import info.dgjones.abora.gold.spaces.basic.OrderSpec;
import info.dgjones.abora.gold.spaces.basic.Position;
import info.dgjones.abora.gold.spaces.basic.XnRegion;
import info.dgjones.abora.gold.spaces.integers.IntegerPos;
import info.dgjones.abora.gold.spaces.integers.IntegerSpace;
import info.dgjones.abora.gold.xcvr.Rcvr;
import info.dgjones.abora.gold.xpp.basic.Heaper;
/**
* The class XuArray is intended to model zero-based arrays with integer keys (indices).
* This makes them like the array primitive in C and C++. There is an additional constraint,
* which is they are to have simple domains. Therefore they should not be constructed with
* non-contiguous sections. This is not currently enforced. Given that it is enforced, an
* XuArray with count N would have as its domain exactly the integers from 0 to N-1.
* There is some controversy over whether XuArray should be a type and enforce this constraint
* (by BLASTing if an attempt is made to violate the constraint), or whether XuArray is just
* a specialized implementation for when an IntegerTable happens to meet this constraint; in
* which case it should "become" a more general implementation when an attempt is made to
* violate the constraint (see "Type Safe Become"). In the latter case, XuArray will
* probably be made a private class as well. Please give us your opinion.
* XuArray provides no additional protocol.
*/
/**
 * Deferred (abstract) zero-based array with integer keys, machine-translated from the
 * Udanax-Gold Smalltalk source quoted in the comments below. Most accessors are deferred
 * to concrete subclasses (see ActualArray); this class supplies the integer-space glue,
 * the Position-keyed overloads, and the static creation protocol.
 */
public class MuArray extends IntegerTable {
// Class variable from the Smalltalk source below; presumably raised when an operation
// would leave the array with a non-contiguous domain -- TODO confirm against subclasses.
protected static Signal MustBeContiguousDomainSignal;
/*
udanax-top.st:49108:
IntegerTable subclass: #MuArray
instanceVariableNames: ''
classVariableNames: 'MustBeContiguousDomainSignal {Signal smalltalk} '
poolDictionaries: ''
category: 'Xanadu-Collection-Tables'!
*/
/*
udanax-top.st:49112:
MuArray comment:
'The class XuArray is intended to model zero-based arrays with integer keys (indices).
This makes them like the array primitive in C and C++. There is an additional constraint, which is they are to have simple domains. Therefore they should not be constructed with non-contiguous sections. This is not currently enforced. Given that it is enforced, an XuArray with count N would have as its domain exactly the integers from 0 to N-1.
There is some controversy over whether XuArray should be a type and enforce this contraint (by BLASTing if an attempt is made to violate the constraint), or whether XuArray is just a specialized implementation for when an IntegerTable happens to meet this constraint; in which case it should "become" a more general implementation when an attempt is made to violate the constraint (see "Type Safe Become"). In the latter case, XuArray will probably be made a private class as well. Please give us your opinion.
XuArray provides no additional protocol.'!
*/
/*
udanax-top.st:49120:
(MuArray getOrMakeCxxClassDescription)
attributes: ((Set new) add: #DEFERRED; yourself)!
*/
/*
udanax-top.st:49243:
MuArray class
instanceVariableNames: ''!
*/
/*
udanax-top.st:49246:
(MuArray getOrMakeCxxClassDescription)
attributes: ((Set new) add: #DEFERRED; yourself)!
*/
/** Registers the DEFERRED (abstract) attribute with the Abora class registry. */
public static void initializeClassAttributes() {
AboraSupport.findAboraClass(MuArray.class).setAttributes( new Set().add("DEFERRED"));
/*
Generated during transformation: AddMethod
*/
}
/** Store {@code value} at integer {@code key}. Deferred: concrete subclasses implement. */
public Heaper intStore(int key, Heaper value) {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49125:MuArray methodsFor: 'accessing'!
{Heaper} atInt: key {IntegerVar} store: value {Heaper}
self subclassResponsibility!
*/
}
/** Keys of an MuArray always live in the integer coordinate space. */
public CoordinateSpace coordinateSpace() {
return IntegerSpace.make();
/*
udanax-top.st:49129:MuArray methodsFor: 'accessing'!
{CoordinateSpace} coordinateSpace
^ IntegerSpace make!
*/
}
/** Number of associations. Deferred to subclasses. */
public int count() {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49133:MuArray methodsFor: 'accessing'!
{IntegerVar} count
self subclassResponsibility.!
*/
}
/** Region of currently occupied keys. Deferred to subclasses. */
public XnRegion domain() {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49137:MuArray methodsFor: 'accessing'!
{XnRegion} domain
self subclassResponsibility.!
*/
}
/** Largest occupied key. Deferred to subclasses. */
public int highestIndex() {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49141:MuArray methodsFor: 'accessing'!
{IntegerVar} highestIndex
self subclassResponsibility!
*/
}
/** Element at integer {@code key}. Deferred to subclasses. */
public Heaper intFetch(int key) {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49144:MuArray methodsFor: 'accessing'!
{Heaper} intFetch: key {IntegerVar}
self subclassResponsibility!
*/
}
/** Remove the association at {@code anIdx} (see wipeAll). Deferred to subclasses. */
public boolean intWipe(int anIdx) {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49147:MuArray methodsFor: 'accessing'!
{BooleanVar} intWipe: anIdx {IntegerVar}
self subclassResponsibility!
*/
}
/** Smallest occupied key. Deferred to subclasses. */
public int lowestIndex() {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49150:MuArray methodsFor: 'accessing'!
{IntegerVar} lowestIndex
self subclassResponsibility!
*/
}
/**
* Return a table which contains the elements from start to stop, starting at firstIndex.
* Zero-based subclasses will blast if firstIndex is non-zero
*/
public ScruTable offsetSubTableBetween(int startIndex, int stopIndex, int firstIndex) {
// firstIndex is unused here (marked 'unused' in the Smalltalk source); the
// delegate is expected to blast for non-zero values per the comment above.
return subTableBetween(startIndex, stopIndex);
/*
udanax-top.st:49153:MuArray methodsFor: 'accessing'!
{ScruTable} offsetSubTableBetween: startIndex {IntegerVar}
with: stopIndex {IntegerVar}
with: firstIndex {IntegerVar unused}
"Return a table which contains the elements from start to stop, starting at firstIndex.
Zero-based subclasses will blast if firstIndex is non-zero"
^ self subTableBetween: startIndex with: stopIndex!
*/
}
/** Sub-table restricted to {@code region}. Deferred to subclasses. */
public ScruTable subTable(XnRegion region) {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49161:MuArray methodsFor: 'accessing'!
{ScruTable} subTable: region {XnRegion}
self subclassResponsibility!
*/
}
/** Sub-table covering keys between {@code startLoc} and {@code endLoc}. Deferred to subclasses. */
public ScruTable subTableBetween(int startLoc, int endLoc) {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49165:MuArray methodsFor: 'accessing'!
{ScruTable} subTableBetween: startLoc {IntegerVar} with: endLoc {IntegerVar}
self subclassResponsibility!
*/
}
/**
* View of this table transformed by {@code dsp}: returns the receiver itself when the
* Dsp equals its own inverse, otherwise a dsp-offset view (see offsetScruArray).
*/
public ScruTable transformedBy(Dsp dsp) {
if (dsp.inverse().isEqual(dsp)) {
return this;
}
else {
return MuArray.offsetScruArray(this, dsp);
}
/*
udanax-top.st:49168:MuArray methodsFor: 'accessing'!
{ScruTable} transformedBy: dsp {Dsp}
(dsp inverse isEqual: dsp)
ifTrue: [^self]
ifFalse: [^MuArray offsetScruArray: self with: dsp]!
*/
}
/** Copy of this table. Deferred to subclasses. */
public ScruTable copy() {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49176:MuArray methodsFor: 'creation'!
{ScruTable} copy
self subclassResponsibility!
*/
}
/** Empty table sized for roughly {@code size} elements. Deferred to subclasses. */
public ScruTable emptySize(int size) {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49179:MuArray methodsFor: 'creation'!
{ScruTable} emptySize: size {IntegerVar}
self subclassResponsibility!
*/
}
/** True exactly when {@code 0 <= aKey < count()} (contiguous zero-based domain). */
public boolean includesIntKey(int aKey) {
return aKey >= 0 && (aKey < count());
/*
udanax-top.st:49185:MuArray methodsFor: 'testing'!
{BooleanVar} includesIntKey: aKey {IntegerVar}
^aKey >= IntegerVar0 and: [aKey < self count]!
*/
}
/** True when the table holds no associations. */
public boolean isEmpty() {
return count() == 0;
/*
udanax-top.st:49188:MuArray methodsFor: 'testing'!
{BooleanVar} isEmpty
^self count = IntegerVar0!
*/
}
/** Run (maximal region of equal values) containing {@code key}. Deferred to subclasses. */
public XnRegion runAtInt(int key) {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49193:MuArray methodsFor: 'runs'!
{XnRegion} runAtInt: key {IntegerVar}
self subclassResponsibility!
*/
}
/**
* Return a stepper on this table.
*/
public TableStepper stepper(OrderSpec order) {
throw new SubclassResponsibilityException();
/*
udanax-top.st:49198:MuArray methodsFor: 'enumerating'!
{TableStepper} stepper: order {OrderSpec default: NULL}
"Return a stepper on this table."
self subclassResponsibility!
*/
}
/** The single element of a one-element table; blasts NOT_ONE_ELEMENT otherwise. */
public Heaper theOne() {
if (count() != 1) {
throw new AboraRuntimeException(AboraRuntimeException.NOT_ONE_ELEMENT);
}
return intFetch(0);
/*
udanax-top.st:49203:MuArray methodsFor: 'enumerating'!
{Heaper} theOne
self count ~~ 1 ifTrue:
[ Heaper BLAST: #NotOneElement ].
^ self intFetch: IntegerVar0!
*/
}
/**
* I 'wipe' from myself all associations whose key
* is in 'region'. See MuTable::wipe
*/
public void wipeAll(XnRegion region) {
if ( ! (region.coordinateSpace().isEqual(coordinateSpace()))) {
throw new AboraRuntimeException(AboraRuntimeException.WRONG_COORD_SPACE);
}
if (isEmpty()) {
return ;
}
if ( ! (region.isSimple())) {
throw new AboraRuntimeException(AboraRuntimeException.NOT_SIMPLE);
}
// Walk the intersection in descending key order; presumably so removals do not
// disturb not-yet-visited lower keys -- TODO confirm against subclass intWipe.
Stepper stomper = ((region.intersect(domain())).stepper((IntegerSpace.make().getDescending())));
for (; stomper.hasValue(); stomper.step()) {
IntegerPos p = (IntegerPos) stomper.fetch();
if (p == null) {
continue ;
}
intWipe(p.asIntegerVar());
}
stomper.destroy();
/*
udanax-top.st:49210:MuArray methodsFor: 'bulk operations'!
{void} wipeAll: region {XnRegion}
"I 'wipe' from myself all associations whose key
is in 'region'. See MuTable::wipe"
(region coordinateSpace isEqual: self coordinateSpace)
ifFalse: [Heaper BLAST: #WrongCoordSpace].
self isEmpty ifTrue: [^VOID].
region isSimple ifFalse: [Heaper BLAST: #NotSimple].
((region intersect: self domain)
stepper: (IntegerSpace make getDescending))
forEach: [:p {IntegerPos} | self intWipe: p asIntegerVar]!
*/
}
/** Position-keyed overload: narrows {@code key} to IntegerPos and delegates to intStore. */
public Heaper store(Position key, Heaper value) {
return intStore(((IntegerPos) key).asIntegerVar(), value);
/*
udanax-top.st:49224:MuArray methodsFor: 'overload junk'!
{Heaper} at: key {Position} store: value {Heaper}
^ self atInt: (key cast: IntegerPos) asIntegerVar store: value!
*/
}
/** Position-keyed overload: narrows {@code key} to IntegerPos and delegates to intFetch. */
public Heaper fetch(Position key) {
return intFetch((((IntegerPos) key).asIntegerVar()));
/*
udanax-top.st:49228:MuArray methodsFor: 'overload junk'!
{Heaper} fetch: key {Position}
^ self intFetch: ((key cast: IntegerPos) asIntegerVar)!
*/
}
/** Position-keyed overload: narrows {@code aKey} to IntegerPos and delegates to includesIntKey. */
public boolean includesKey(Position aKey) {
return includesIntKey((((IntegerPos) aKey).asIntegerVar()));
/*
udanax-top.st:49232:MuArray methodsFor: 'overload junk'!
{BooleanVar} includesKey: aKey {Position}
^self includesIntKey: ((aKey cast: IntegerPos) asIntegerVar)!
*/
}
/** Position-keyed overload: narrows {@code key} to IntegerPos and delegates to runAtInt. */
public XnRegion runAt(Position key) {
return runAtInt((((IntegerPos) key).asIntegerVar()));
/*
udanax-top.st:49235:MuArray methodsFor: 'overload junk'!
{XnRegion} runAt: key {Position}
^self runAtInt: ((key quickCast: IntegerPos) asIntegerVar)!
*/
}
/** Position-keyed overload: narrows {@code key} to IntegerPos and delegates to intWipe. */
public boolean wipe(Position key) {
return intWipe((((IntegerPos) key).asIntegerVar()));
/*
udanax-top.st:49239:MuArray methodsFor: 'overload junk'!
{BooleanVar} wipe: key {Position}
^ self intWipe: ((key cast: IntegerPos) asIntegerVar)!
*/
}
/**
* A new empty XnArray
*/
public static MuArray array() {
return (MuArray) MuArray.makeIntegerVar(1);
/*
udanax-top.st:49251:MuArray class methodsFor: 'creation'!
{MuArray INLINE} array
"A new empty XnArray"
^MuArray make.IntegerVar: 1!
*/
}
/**
* A new XnArray initialized with a single element, 'obj0', stored at index 0.
*/
public static MuArray array(Heaper obj0) {
MuArray table;
table = (MuArray) MuArray.makeIntegerVar(1);
table.intStore(0, obj0);
return table;
/*
udanax-top.st:49256:MuArray class methodsFor: 'creation'!
{MuArray} array: obj0 {Heaper}
"A new XnArray initialized with a single element, 'obj0', stored at index 0."
| table {MuArray} |
table _ MuArray make.IntegerVar: 1.
table atInt: IntegerVar0 store: obj0.
^table!
*/
}
/**
* A new XnArray initialized with a two elements stored at indicies 0 and 1.
*/
public static MuArray array(Heaper obj0, Heaper obj1) {
MuArray table;
table = (MuArray) MuArray.makeIntegerVar(2);
table.intStore(0, obj0);
table.intStore(1, obj1);
return table;
/*
udanax-top.st:49264:MuArray class methodsFor: 'creation'!
{MuArray} array: obj0 {Heaper} with: obj1 {Heaper}
"A new XnArray initialized with a two elements stored at indicies 0 and 1."
| table {MuArray} |
table _ MuArray make.IntegerVar: 2.
table atInt: IntegerVar0 store: obj0.
table atInt: 1 store: obj1.
^table!
*/
}
/**
* A new XuArray initialized with a three elements stored at indicies 0, 1, and 2.
*/
public static MuArray array(Heaper obj0, Heaper obj1, Heaper obj2) {
MuArray table;
table = (MuArray) MuArray.makeIntegerVar(3);
table.intStore(0, obj0);
table.intStore(1, obj1);
table.intStore(2, obj2);
return table;
/*
udanax-top.st:49273:MuArray class methodsFor: 'creation'!
{MuArray} array: obj0 {Heaper} with: obj1 {Heaper} with: obj2 {Heaper}
"A new XuArray initialized with a three elements stored at indicies 0, 1, and 2."
| table {MuArray} |
table _ MuArray make.IntegerVar: 3.
table atInt: IntegerVar0 store: obj0.
table atInt: 1 store: obj1.
table atInt: 2 store: obj2.
^table!
*/
}
/**
* A new XuArray initialized with a four elements stored at indicies 0 through 3.
*/
public static MuArray array(Heaper obj0, Heaper obj1, Heaper obj2, Heaper obj3) {
MuArray table;
table = (MuArray) MuArray.makeIntegerVar(4);
table.intStore(0, obj0);
table.intStore(1, obj1);
table.intStore(2, obj2);
table.intStore(3, obj3);
return table;
/*
udanax-top.st:49283:MuArray class methodsFor: 'creation'!
{MuArray} array: obj0 {Heaper}
with: obj1 {Heaper}
with: obj2 {Heaper}
with: obj3 {Heaper}
"A new XuArray initialized with a four elements stored at indicies 0 through 3."
| table {MuArray} |
table _ MuArray make.IntegerVar: 4.
table atInt: IntegerVar0 store: obj0.
table atInt: 1 store: obj1.
table atInt: 2 store: obj2.
table atInt: 3 store: obj3.
^table!
*/
}
/**
* Returns an Accumulator which will produce an XuArray of the elements
* accumulated into it in order of accumulation. See XuArray. Equivalent to
* 'tableAccumulator()'. Eventually either he or I should be declared obsolete.
*/
public static TableAccumulator arrayAccumulator() {
return ArrayAccumulator.make(MuArray.array());
/*
udanax-top.st:49297:MuArray class methodsFor: 'creation'!
{TableAccumulator} arrayAccumulator
"Returns an Accumulator which will produce an XuArray of the elements
accumulated into it in order of accumulation. See XuArray. Equivalent to
'tableAccumulator()'. Eventually either he or I should be declared obsolete."
^ ArrayAccumulator make: MuArray array!
*/
}
/**
* An accumulator which will accumulate by appending elements onto the end of
* 'onArray'. It is an error for anyone else to modify 'onArray' between creating
* this accumulator and accumulating into it. acc->value() will return 'onArray'
* itself.
*/
public static TableAccumulator arrayAccumulator(MuArray onArray) {
return ArrayAccumulator.make(onArray);
/*
udanax-top.st:49304:MuArray class methodsFor: 'creation'!
{TableAccumulator} arrayAccumulator: onArray {MuArray}
"An accumulator which will accumulate by appending elements onto the end of
'onArray'. It is an error for anyone else to modify 'onArray' between creating
this accumulator and accumulating into it. acc->value() will return 'onArray'
itself."
^ArrayAccumulator make: onArray!
*/
}
/**
* 'someSize' is a hint about how big we should expect the array to need to grow.
*/
public static IntegerTable makeIntegerVar(int someSize) {
return new ActualArray(someSize);
/*
udanax-top.st:49312:MuArray class methodsFor: 'creation'!
make.IntegerVar: someSize {IntegerVar}
"'someSize' is a hint about how big we should expect the array to need to grow."
^ActualArray create.IntegerVar: someSize!
*/
}
/**
* The resulting ScruTable is a view onto 'array'. It is a view in which each key
* is offset by 'dsp' from where it is in 'array'. By saying it is a view, we mean
* that as 'array' is modified, the view tracks the changes.
*/
public static ScruTable offsetScruArray(MuArray array, Dsp dsp) {
return OffsetScruArray.make(array, dsp);
/*
udanax-top.st:49317:MuArray class methodsFor: 'creation'!
{ScruTable} offsetScruArray: array {MuArray} with: dsp {Dsp}
"The resulting ScruTable is a view onto 'array'. It is a view in which each key
is offset by 'dsp' from where it is in 'array'. By saying it is a view, we mean
that as 'array' is modified, the view tracks the changes."
^OffsetScruArray make: array with: dsp!
*/
}
/** Default constructor (generated by the Smalltalk-to-Java transformation). */
public MuArray() {
/*
Generated during transformation
*/
}
/** Deserialization constructor (generated by the Smalltalk-to-Java transformation). */
public MuArray(Rcvr receiver) {
super(receiver);
/*
Generated during transformation
*/
}
}
| |
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
******************************************************************************/
package org.apache.olingo.odata2.fit.ref;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertArrayEquals;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import junit.framework.Assert;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.entity.StringEntity;
import org.apache.olingo.odata2.api.client.batch.BatchChangeSet;
import org.apache.olingo.odata2.api.client.batch.BatchChangeSetPart;
import org.apache.olingo.odata2.api.client.batch.BatchPart;
import org.apache.olingo.odata2.api.client.batch.BatchSingleResponse;
import org.apache.olingo.odata2.api.commons.HttpHeaders;
import org.apache.olingo.odata2.api.ep.EntityProvider;
import org.apache.olingo.odata2.core.batch.BatchRequestWriter;
import org.apache.olingo.odata2.ref.processor.Util;
import org.apache.olingo.odata2.testutil.helper.StringHelper;
import org.apache.olingo.odata2.testutil.server.ServletType;
import org.junit.Test;
/**
*
*
*/
/**
 * Integration tests for the OData {@code $batch} endpoint of the reference scenario:
 * plain batches, changesets, Content-Id referencing and echoing, error handling, and
 * binary (raw-byte / image) payloads inside changesets.
 */
public class BatchTest extends AbstractRefTest {

  private static final String PUT = "PUT";
  private static final String POST = "POST";
  /** Boundary used both for generated batch requests and the {@code *.batch} fixtures. */
  private static final String BOUNDARY = "batch_123";

  /**
   * @param servletType web-container flavor the test fixture is started with
   */
  public BatchTest(final ServletType servletType) {
    super(servletType);
  }

  /** A metadata GET inside a batch succeeds and produces no OData error document. */
  @Test
  public void testSimpleBatch() throws Exception {
    String responseBody = execute("/simple.batch");
    assertFalse(responseBody
        .contains("<error xmlns=\"http://schemas.microsoft.com/ado/2007/08/dataservices/metadata\">"));
    assertTrue(responseBody.contains(
        "<edmx:Edmx xmlns:edmx=\"http://schemas.microsoft.com/ado/2007/06/edmx\" Version=\"1.0\""));
  }

  /** A function-import call inside a batch returns 200 with the expected XML payload. */
  @Test
  public void functionImportBatch() throws Exception {
    String responseBody = execute("/functionImport.batch");
    assertFalse(responseBody
        .contains("<error xmlns=\"http://schemas.microsoft.com/ado/2007/08/dataservices/metadata\">"));
    assertTrue(responseBody.contains("HTTP/1.1 200 OK"));
    assertTrue(responseBody.contains("<?xml version='1.0' encoding='utf-8'?><ManagerPhoto xmlns="));
  }

  /** A filtered entity-set GET inside a batch returns the expected employee. */
  @Test
  public void employeesWithFilterBatch() throws Exception {
    String responseBody = execute("/employeesWithFilter.batch");
    assertFalse(responseBody
        .contains("<error xmlns=\"http://schemas.microsoft.com/ado/2007/08/dataservices/metadata\">"));
    assertTrue(responseBody.contains("HTTP/1.1 200 OK"));
    assertTrue(responseBody.contains("<d:EmployeeName>Walter Winter</d:EmployeeName>"));
  }

  /** A changeset modification is applied and visible in the batch response. */
  @Test
  public void testChangeSetBatch() throws Exception {
    String responseBody = execute("/changeset.batch");
    assertTrue(responseBody.contains("Frederic Fall MODIFIED"));
  }

  /** Later batch parts may reference earlier parts via Content-Id ($<id> URIs). */
  @Test
  public void testContentIdReferencing() throws Exception {
    String responseBody = execute("/batchWithContentId.batch");
    assertTrue(responseBody.contains("HTTP/1.1 201 Created"));
    assertTrue(responseBody.contains("HTTP/1.1 204 No Content"));
    assertTrue(responseBody.contains("HTTP/1.1 200 OK"));
    assertTrue(responseBody.contains("\"EmployeeName\":\"Frederic Fall MODIFIED\""));
    assertTrue(responseBody.contains("\"Age\":40"));
  }

  /** Content-Id headers of the request parts are echoed back in the response parts. */
  @Test
  public void testContentIdEchoing() throws Exception {
    String responseBody = execute("/batchWithContentId.batch");
    assertTrue(responseBody.contains("Content-Id: 1"));
    assertTrue(responseBody.contains("Content-Id: 2"));
    assertTrue(responseBody.contains("Content-Id: 3"));
    assertTrue(responseBody.contains("Content-Id: 4"));
    assertTrue(responseBody.contains("Content-Id: AAA"));
    assertTrue(responseBody.contains("Content-Id: newEmployee"));
  }

  /** Referencing an unknown Content-Id yields a 404 part in the batch response. */
  @Test
  public void testWrongContentId() throws Exception {
    HttpResponse response = execute("/batchWithWrongContentId.batch", "batch_cf90-46e5-1246");
    String responseBody = StringHelper.inputStreamToString(response.getEntity().getContent(), true);
    assertTrue(responseBody.contains("HTTP/1.1 404 Not Found"));
  }

  /** A failing first create request in a changeset is reported as 404. */
  @Test
  public void testFailFirstRequest() throws Exception {
    HttpResponse response = execute("/batchFailFirstCreateRequest.batch", "batch_cf90-46e5-1246");
    String responseBody = StringHelper.inputStreamToString(response.getEntity().getContent(), true);
    assertTrue(responseBody.contains("HTTP/1.1 404 Not Found"));
  }

  /** GET-POST-PUT-GET batch: checks response ordering and that the PUT part has no Content-Type. */
  @Test
  public void testGPPG() throws Exception {
    HttpResponse response = execute("/batchWithContentIdPart2.batch", "batch_cf90-46e5-1246");
    String responseBody = StringHelper.inputStreamToString(response.getEntity().getContent(), true);
    assertContentContainValues(responseBody,
        "{\"d\":{\"EmployeeName\":\"Frederic Fall\"}}",
        "HTTP/1.1 201 Created",
        "Content-Id: employee",
        "Content-Type: application/json;odata=verbose",
        "\"EmployeeId\":\"7\",\"EmployeeName\":\"Employee 7\",",
        "HTTP/1.1 204 No Content",
        "Content-Id: AAA",
        "{\"d\":{\"EmployeeName\":\"Robert Fall\"}}");
    // validate that response for PUT does not contain a Content-Type header
    int indexNoContent = responseBody.indexOf("HTTP/1.1 204 No Content");
    int indexBoundary = responseBody.indexOf("--changeset_", indexNoContent);
    int indexContentType = responseBody.indexOf("Content-Type:", indexNoContent);
    // Fixed: the original `indexBoundary < indexContentType` wrongly failed when no
    // Content-Type followed at all (indexOf == -1), although that is the good case.
    // Also uses org.junit.Assert instead of the deprecated junit.framework.Assert.
    assertTrue("Unexpected Content-Type inside the 204 (PUT) response part",
        indexContentType < 0 || indexBoundary < indexContentType);
  }

  /** A batch referencing a non-existing resource returns a 404 part. */
  @Test
  public void testErrorBatch() throws Exception {
    String responseBody = execute("/error.batch");
    assertTrue(responseBody.contains("HTTP/1.1 404 Not Found"));
  }

  /**
   * Validate that given <code>content</code> contains all <code>values</code> in the given order.
   *
   * @param content full text to search in
   * @param containingValues values that must all occur, in the given order
   */
  private void assertContentContainValues(final String content, final String... containingValues) {
    int index = -1;
    for (final String value : containingValues) {
      final int newIndex = content.indexOf(value, index);
      // org.junit.Assert instead of the deprecated junit.framework.Assert
      assertTrue("Value '" + value + "' not found after index position '" + index + "'.", newIndex >= 0);
      index = newIndex;
    }
  }

  /**
   * Posts the given classpath batch fixture with the default boundary and returns the
   * response body with CRLF line breaks.
   */
  private String execute(final String batchResource) throws Exception {
    HttpResponse response = execute(batchResource, "batch_123");
    return StringHelper.inputStreamToStringCRLFLineBreaks(response.getEntity().getContent());
  }

  /**
   * Posts the given classpath batch fixture to {@code $batch} with the given boundary
   * and asserts the 202 Accepted status.
   */
  private HttpResponse execute(final String batchResource, final String boundary) throws IOException,
      UnsupportedEncodingException, ClientProtocolException {
    final HttpPost post = new HttpPost(URI.create(getEndpoint().toString() + "$batch"));
    post.setHeader("Content-Type", "multipart/mixed;boundary=" + boundary);
    String body = StringHelper.inputStreamToStringCRLFLineBreaks(this.getClass().getResourceAsStream(batchResource));
    HttpEntity entity = new StringEntity(body);
    post.setEntity(entity);
    HttpResponse response = getHttpClient().execute(post);
    assertNotNull(response);
    assertEquals(202, response.getStatusLine().getStatusCode());
    return response;
  }

  /**
   * Builds a batch request containing a single changeset with one PUT or POST part
   * carrying the given binary payload.
   *
   * @param method {@link #PUT} or {@link #POST} (case-insensitive)
   * @param data binary request body of the changeset part
   * @param contentType value of the part's content-type header
   * @return stream containing the serialized multipart batch request
   */
  private InputStream createBatchRequest(final String method, final byte[] data, final String contentType) {
    final Map<String, String> headers = new HashMap<String, String>();
    headers.put("content-type", contentType);
    final BatchChangeSetPart request;
    if (method.equalsIgnoreCase(PUT)) {
      request = BatchChangeSetPart.method(PUT)
          .uri("Employees('2')/$value")
          .body(data)
          .headers(headers)
          .contentId("1")
          .build();
    } else if (method.equalsIgnoreCase(POST)) {
      request = BatchChangeSetPart.method(POST)
          .uri("Employees")
          .body(data)
          .headers(headers)
          .contentId("1")
          .build();
    } else {
      // Previously an unsupported method silently added a null part; fail fast instead.
      throw new IllegalArgumentException("Unsupported method: " + method);
    }
    final BatchChangeSet changeSet = BatchChangeSet.newBuilder().build();
    changeSet.add(request);
    final List<BatchPart> batch = new ArrayList<BatchPart>();
    batch.add(changeSet);
    final BatchRequestWriter writer = new BatchRequestWriter();
    return writer.writeBatchRequest(batch, BOUNDARY);
  }

  /** PUT of raw (non-UTF-8) bytes in a changeset: stored verbatim and readable back. */
  @Test
  public void testBatchWithChangesetWithRawBytesInPutOperation() throws Exception {
    InputStream requestPayload = createBatchRequestWithRawBytes(PUT);
    final HttpPost put = new HttpPost(URI.create(getEndpoint().toString() + "$batch"));
    put.setHeader("Content-Type", "multipart/mixed;boundary=" + BOUNDARY);
    HttpEntity entity = new InputStreamEntity(requestPayload, -1);
    put.setEntity(entity);
    HttpResponse response = getHttpClient().execute(put);
    byte[] actualData = Util.getInstance().getBinaryContent();
    byte[] expectedData = rawBytes();
    // Comparing data stored in the data source and the data sent in the request.
    // Fixed argument order: assertArrayEquals expects (expected, actual).
    assertArrayEquals(expectedData, actualData);
    assertNotNull(response);
    assertEquals(202, response.getStatusLine().getStatusCode());
    String responseBody = StringHelper.inputStreamToStringCRLFLineBreaks(response.getEntity().getContent());
    assertTrue(responseBody.contains("204 No Content"));
    HttpResponse resp = execute("/simpleGet.batch", BOUNDARY);
    InputStream in = resp.getEntity().getContent();
    StringHelper.Stream batchRequestStream = StringHelper.toStream(in);
    String requestBody = batchRequestStream.asString();
    String contentType = resp.getFirstHeader(HttpHeaders.CONTENT_TYPE).getValue();
    List<BatchSingleResponse> responses = EntityProvider.parseBatchResponse(
        new ByteArrayInputStream(requestBody.getBytes("iso-8859-1")), contentType);
    for (BatchSingleResponse batchResp : responses) {
      assertEquals("200", batchResp.getStatusCode());
      assertEquals("OK", batchResp.getStatusInfo());
      assertArrayEquals(expectedData, batchResp.getBody().getBytes("iso-8859-1"));
    }
  }

  /** POST of raw (non-UTF-8) bytes in a changeset: stored verbatim and readable back. */
  @Test
  public void testBatchWithChangesetWithRawBytesInPOSTOperation() throws Exception {
    InputStream requestPayload = createBatchRequestWithRawBytes(POST);
    final HttpPost put = new HttpPost(URI.create(getEndpoint().toString() + "$batch"));
    put.setHeader("Content-Type", "multipart/mixed;boundary=" + BOUNDARY);
    HttpEntity entity = new InputStreamEntity(requestPayload, -1);
    put.setEntity(entity);
    HttpResponse response = getHttpClient().execute(put);
    byte[] actualData = Util.getInstance().getBinaryContent();
    byte[] expectedData = rawBytes();
    // Comparing data stored in the data source and the data sent in the request.
    // Fixed argument order: assertArrayEquals expects (expected, actual).
    assertArrayEquals(expectedData, actualData);
    assertNotNull(response);
    assertEquals(202, response.getStatusLine().getStatusCode());
    String responseBody = StringHelper.inputStreamToStringCRLFLineBreaks(response.getEntity().getContent());
    assertTrue(responseBody.contains("201 Created"));
    HttpResponse resp = execute("/simpleGet1.batch", BOUNDARY);
    InputStream in = resp.getEntity().getContent();
    StringHelper.Stream batchRequestStream = StringHelper.toStream(in);
    String requestBody = batchRequestStream.asString();
    String contentType = resp.getFirstHeader(HttpHeaders.CONTENT_TYPE).getValue();
    List<BatchSingleResponse> responses = EntityProvider.parseBatchResponse(
        new ByteArrayInputStream(requestBody.getBytes("iso-8859-1")), contentType);
    for (BatchSingleResponse batchResp : responses) {
      assertEquals("200", batchResp.getStatusCode());
      assertEquals("OK", batchResp.getStatusInfo());
      assertArrayEquals(expectedData, batchResp.getBody().getBytes("iso-8859-1"));
    }
  }

  /** PUT of a binary image in a changeset: stored verbatim and readable back. */
  @Test
  public void testBatchWithChangesetWithImageObjectInPutOperation() throws Exception {
    InputStream requestPayload = createBatchRequestWithImage("/Employee_1.png", PUT);
    final HttpPost put = new HttpPost(URI.create(getEndpoint().toString() + "$batch"));
    put.setHeader("Content-Type", "multipart/mixed;boundary=" + BOUNDARY);
    HttpEntity entity = new InputStreamEntity(requestPayload, -1);
    put.setEntity(entity);
    HttpResponse response = getHttpClient().execute(put);
    byte[] actualData = Util.getInstance().getBinaryContent();
    byte[] expectedData = getImageData("/Employee_1.png");
    // Comparing data stored in the data source and the data sent in the request.
    // Fixed argument order: assertArrayEquals expects (expected, actual).
    assertArrayEquals(expectedData, actualData);
    assertNotNull(response);
    assertEquals(202, response.getStatusLine().getStatusCode());
    String responseBody = StringHelper.inputStreamToStringCRLFLineBreaks(response.getEntity().getContent());
    assertTrue(responseBody.contains("204 No Content"));
    HttpResponse resp = execute("/simpleGet.batch", BOUNDARY);
    InputStream in = resp.getEntity().getContent();
    StringHelper.Stream batchRequestStream = StringHelper.toStream(in);
    String requestBody = batchRequestStream.asString();
    String contentType = resp.getFirstHeader(HttpHeaders.CONTENT_TYPE).getValue();
    List<BatchSingleResponse> responses = EntityProvider.parseBatchResponse(
        new ByteArrayInputStream(requestBody.getBytes("iso-8859-1")), contentType);
    for (BatchSingleResponse batchResp : responses) {
      assertEquals("200", batchResp.getStatusCode());
      assertEquals("OK", batchResp.getStatusInfo());
      assertArrayEquals(expectedData, batchResp.getBody().getBytes("iso-8859-1"));
    }
  }

  /** Batch request whose changeset part carries the given classpath image resource. */
  private InputStream createBatchRequestWithImage(final String imageUrl, final String method) throws IOException {
    byte[] data = getImageData(imageUrl);
    // NOTE(review): content type says "image/jpeg" although the fixture is a PNG;
    // presumably the test service does not validate it -- kept as-is.
    return createBatchRequest(method, data, "image/jpeg");
  }

  /** Batch request whose changeset part carries all 256 possible byte values. */
  private InputStream createBatchRequestWithRawBytes(final String method) {
    byte[] data = rawBytes();
    // NOTE(review): "octect" is a typo for "octet"; kept as-is to leave the request
    // bytes on the wire unchanged.
    return createBatchRequest(method, data, "application/octect-stream");
  }

  /**
   * @return an array containing every possible byte value exactly once
   *         (binary content, not a valid UTF-8 representation of a string)
   */
  private byte[] rawBytes() {
    byte[] data = new byte[Byte.MAX_VALUE - Byte.MIN_VALUE + 1];
    for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; i++) {
      data[i - Byte.MIN_VALUE] = (byte) i;
    }
    return data;
  }

  /**
   * Reads a classpath resource fully into a byte array.
   *
   * @param imageUrl classpath path of the resource
   * @return the resource content
   * @throws IOException if the resource is missing or reading fails
   */
  private byte[] getImageData(final String imageUrl) throws IOException {
    // Fixed: the stream is now always closed, and the pointless
    // catch-and-rethrow of IOException was removed.
    final InputStream in = this.getClass().getResourceAsStream(imageUrl);
    if (in == null) {
      throw new IOException("Resource not found: " + imageUrl);
    }
    try {
      final ByteArrayOutputStream stream = new ByteArrayOutputStream();
      int b;
      while ((b = in.read()) != -1) {
        stream.write(b);
      }
      return stream.toByteArray();
    } finally {
      in.close();
    }
  }
}
| |
/*
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2015
*/
package com.ibm.streamsx.topology.test.spl;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.ibm.streams.operator.StreamSchema;
import com.ibm.streams.operator.Tuple;
import com.ibm.streams.operator.Type;
import com.ibm.streams.operator.Type.MetaType;
import com.ibm.streams.operator.types.RString;
import com.ibm.streamsx.topology.TStream;
import com.ibm.streamsx.topology.Topology;
import com.ibm.streamsx.topology.context.ContextProperties;
import com.ibm.streamsx.topology.function.Supplier;
import com.ibm.streamsx.topology.spl.SPL;
import com.ibm.streamsx.topology.spl.SPLStream;
import com.ibm.streamsx.topology.spl.SPLStreams;
import com.ibm.streamsx.topology.test.TestTopology;
import com.ibm.streamsx.topology.tester.Condition;
import com.ibm.streamsx.topology.tester.Tester;
public class SPLOperatorsTest extends TestTopology {
@Before
public void runSpl() {
// Presumably skips all tests in this class (JUnit assumption) when SPL/Streams
// support is not available; defined in TestTopology -- TODO confirm.
assumeSPLOk();
}
/**
 * Invoke a single SPL operator (testspl::Int32Filter) and verify the
 * filtered output stream.
 */
@Test
public void testSPLOperator() throws Exception {
    Topology topo = new Topology("testSPLOperator");
    SPLStream source = SPLStreamsTest.testTupleStream(topo);

    // Operator parameters: filter on the "vi" attribute, passing the value 321.
    Map<String, Object> opParams = new HashMap<>();
    opParams.put("attr", source.getSchema().getAttribute("vi"));
    opParams.put("value", 321);

    SPL.addToolkit(source, new File(getTestRoot(), "spl/testtk"));
    SPL.addToolkitDependency(source, "com.ibm.streamsx.topology.testing.testtk", "0.9.9");

    SPLStream filtered = SPL.invokeOperator("testspl::Int32Filter", source, source.getSchema(), opParams);

    Tester tester = topo.getTester();
    Condition<Long> countOk = tester.tupleCount(filtered, 2);
    Condition<List<Tuple>> contentsOk = tester.tupleContents(filtered,
            SPLStreamsTest.TEST_TUPLES[0],
            SPLStreamsTest.TEST_TUPLES[2]);

    if (isStreamingAnalyticsRun()) {
        getConfig().put(ContextProperties.FORCE_REMOTE_BUILD, true);
    }
    complete(tester, countOk, 10, TimeUnit.SECONDS);

    assertTrue(countOk.toString(), countOk.valid());
    assertTrue(contentsOk.toString(), contentsOk.valid());
}
/**
 * Test we can add options to sc.
 * A C++ primitive operator is used to
 * detect C++11 setting and a #defined value
 */
@Test
public void testSCoptionsNoOpts() throws Exception {
// No sc options set at all: expect the C++98 default and no SCOPT define.
_testSCoptions(null, "CPP98", "NOOPT");
}
@Test
public void testSCoptionsNoOpts2() throws Exception {
// An empty option list behaves like passing no options at all.
_testSCoptions(Collections.emptyList(), "CPP98", "NOOPT");
}
@Test
public void testSCoptionsSingle() throws Exception {
// A single option passed as a bare String (not wrapped in a list).
_testSCoptions("--c++std=c++11", "CPP11", "NOOPT");
}
@Test
public void testSCoptionsSingleList() throws Exception {
// A single option wrapped in a list; defines SCOPT_TESTING for the C++ operator.
_testSCoptions(Collections.singletonList("--cxx-flags=-DSCOPT_TESTING=1"), "CPP98", "SCOPT");
}
@Test
public void testSCoptionsMulti() throws Exception {
    // Both options together: C++11 standard plus the SCOPT_TESTING define.
    List<String> scOptions = new ArrayList<>();
    scOptions.add("--cxx-flags=-DSCOPT_TESTING=1");
    scOptions.add("--c++std=c++11");
    _testSCoptions(scOptions, "CPP11", "SCOPT");
}
/**
 * Runs testspl::ScOptionTester with the given sc options (String, List, or null)
 * and checks the two marker strings it emits: the C++ standard marker (e1) and
 * the define marker (e2).
 */
private void _testSCoptions(Object options, String e1, String e2) throws Exception {
    Topology topo = new Topology("testSCoptions");
    SPLStream single = SPLStreams.stringToSPLStream(
            topo.constants(Collections.singletonList("A")));
    SPL.addToolkit(topo, new File(getTestRoot(), "spl/testtk"));
    TStream<String> output = SPL.invokeOperator("SCO", "testspl::ScOptionTester", single,
            single.getSchema(), Collections.emptyMap()).toStringStream();
    if (options != null) {
        getConfig().put(ContextProperties.SC_OPTIONS, options);
    }
    Tester tester = topo.getTester();
    Condition<Long> countOk = tester.tupleCount(output, 2);
    Condition<List<String>> markersOk = tester.stringContents(output, e1, e2);
    complete(tester, countOk, 10, TimeUnit.SECONDS);
    assertTrue(countOk.toString(), countOk.valid());
    assertTrue(markersOk.toString(), markersOk.valid());
}
/**
 * Invoke an SPL operator with two output ports (testspl::Int32FilterPF):
 * matching tuples arrive on the first port, dropped tuples on the second.
 * (Method name typo "Ouptuts" is kept: it is also used as the invocation name.)
 */
@Test
public void testSPLOperatorMultipleOuptuts() throws Exception {
    Topology topo = new Topology();
    SPLStream source = SPLStreamsTest.testTupleStream(topo);

    // Operator parameters: filter on the "vi" attribute, passing the value 321.
    Map<String, Object> opParams = new HashMap<>();
    opParams.put("attr", source.getSchema().getAttribute("vi"));
    opParams.put("value", 321);

    SPL.addToolkit(source, new File(getTestRoot(), "spl/testtk"));
    SPL.addToolkitDependency(source, "com.ibm.streamsx.topology.testing.testtk", "0.9.9");

    List<SPLStream> outputs = SPL.invokeOperator(
            topo,
            "testSPLOperatorMultipleOuptuts",
            "testspl::Int32FilterPF",
            Collections.singletonList(source),
            Collections.nCopies(2, source.getSchema()),
            opParams);
    SPLStream passed = outputs.get(0);
    SPLStream dropped = outputs.get(1);

    Tester tester = topo.getTester();
    Condition<Long> droppedCount = tester.tupleCount(dropped, 2);
    Condition<List<Tuple>> passedContents = tester.tupleContents(passed,
            SPLStreamsTest.TEST_TUPLES[0],
            SPLStreamsTest.TEST_TUPLES[2]);
    Condition<List<Tuple>> droppedContents = tester.tupleContents(dropped,
            SPLStreamsTest.TEST_TUPLES[1],
            SPLStreamsTest.TEST_TUPLES[3]);

    complete(tester, droppedCount, 10, TimeUnit.SECONDS);

    assertTrue(droppedCount.toString(), droppedCount.valid());
    assertTrue(passedContents.toString(), passedContents.valid());
    assertTrue(droppedContents.toString(), droppedContents.valid());
}
/**
 * Test we can invoke an SPL operator with various parameter types.
 *
 * Drives testgen::TypeLiteralTester with one parameter per SPL type and
 * asserts the single emitted tuple echoes every value back. How each
 * parameter reaches the operator (literal vs. submission-time value) is
 * determined by the supplied {@code opParamAdder}.
 *
 * @param testName topology (and test) name
 * @param opParamAdder strategy that records each name/value pair as an
 *        operator parameter; see OpParamAdder and its anonymous subclasses
 */
private void testOpParams(String testName, OpParamAdder opParamAdder) throws Exception {

    Topology topology = new Topology(testName);
    opParamAdder.init(topology, getConfig());

    StreamSchema schema = Type.Factory.getStreamSchema(
            "tuple<"
            + "rstring r"
            + ", ustring u"
            + ", boolean b"
            + ", int8 i8, int16 i16, int32 i32, int64 i64"
            + ", uint8 ui8, uint16 ui16, uint32 ui32, uint64 ui64"
            + ", float32 f32, float64 f64"
            + " >");

    Map<String,Object> expectedValues = new HashMap<>();

    Random rand = new Random();
    // Strings include tabs, quotes, newlines and backslash escapes to
    // exercise SPL literal escaping in the generated code.
    String r = "test X\tY\"Lit\nerals\\nX\\tY " + rand.nextInt();
    opParamAdder.put("r", r);
    String u = "test X\tY\"Lit\nerals\\nX\\tY " + rand.nextInt();
    opParamAdder.put("u", SPL.createValue(u, MetaType.USTRING));
    expectedValues.put("r", new RString(r));
    expectedValues.put("u", u);

    boolean b = rand.nextBoolean();
    opParamAdder.put("b", b);
    expectedValues.put("b", b);

    byte i8 = (byte) rand.nextInt();
    short i16 = (short) rand.nextInt();
    int i32 = rand.nextInt();
    long i64 = rand.nextLong();
    opParamAdder.put("i8", i8);
    opParamAdder.put("i16", i16);
    opParamAdder.put("i32", i32);
    opParamAdder.put("i64", i64);
    expectedValues.put("i8", i8);
    expectedValues.put("i16", i16);
    expectedValues.put("i32", i32);
    expectedValues.put("i64", i64);

    // Unsigned SPL values are carried in Java's signed primitives;
    // the comments show the unsigned value and its signed representation.
    byte ui8 = (byte) 0xFF;        // 255 => -1
    short ui16 = (short) 0xFFFE;   // 65534 => -2
    int ui32 = 0xFFFFFFFD;         // 4294967293 => -3
    long ui64 = 0xFFFFFFFFFFFFFFFCL; // 18446744073709551612 => -4
    opParamAdder.put("ui8", SPL.createValue(ui8, MetaType.UINT8));
    opParamAdder.put("ui16", SPL.createValue(ui16, MetaType.UINT16));
    opParamAdder.put("ui32", SPL.createValue(ui32, MetaType.UINT32));
    opParamAdder.put("ui64", SPL.createValue(ui64, MetaType.UINT64));
    expectedValues.put("ui8", ui8);
    expectedValues.put("ui16", ui16);
    expectedValues.put("ui32", ui32);
    expectedValues.put("ui64", ui64);

    float f32 = 4.0f;
    double f64 = 32.0;
    opParamAdder.put("f32", f32);
    opParamAdder.put("f64", f64);
    expectedValues.put("f32", f32);
    expectedValues.put("f64", f64);

    SPL.addToolkit(topology, new File(getTestRoot(), "spl/testtk"));
    SPLStream paramTuple = SPL.invokeSource(topology, "testgen::TypeLiteralTester", opParamAdder.getParams(), schema);

    Tuple expectedTuple = schema.getTuple(expectedValues);

    Tester tester = topology.getTester();
    Condition<Long> expectedCount = tester.tupleCount(paramTuple, 1);
    // Fixed: removed a stray second semicolon at the end of this statement.
    Condition<?> contents = tester.tupleContents(paramTuple, expectedTuple);

    complete(tester, expectedCount.and(contents), 10, TimeUnit.SECONDS);

    assertTrue(contents.valid());
    assertTrue(expectedCount.valid());
}
/**
 * Operator parameter adder.
 * Base implementation adds values as SPL literals; anonymous subclasses
 * override {@link #put} to route values through submission-time
 * parameters instead. Subclasses rely on direct access to
 * {@code params}, {@code top} and {@code config}, so these members are
 * part of this class's (package-private) contract.
 */
private static class OpParamAdder {
    // Accumulated operator parameters, keyed by parameter name.
    final Map<String,Object> params = new HashMap<>();
    // Set by init(); available to subclass put() implementations.
    Topology top;
    Map<String,Object> config;
    void init(Topology top, Map<String,Object> config) {
        this.top = top;
        this.config = config;
    }
    void put(String opParamName, Object opParamValue) {
        params.put(opParamName, opParamValue);
    }
    Map<String,Object> getParams() {
        return params;
    }
}
@Test
public void testParamLiterals() throws Exception {
    // Test operator parameters with literal values (base OpParamAdder).
    testOpParams("testParamLiterals", new OpParamAdder());
}
/**
 * Test we can invoke an SPL operator with various parameter types,
 * where the type is an optional type.
 *
 * Exercises both ways of passing a null optional value: a plain Java
 * null in the parameter map, and SPL.createNullValue().
 *
 * @param testName topology (and test) name
 * @param opParamAdder strategy that records each name/value pair
 */
private void testOpParamsOptionalTypes(String testName, OpParamAdder opParamAdder)
        throws Exception {
    Topology topology = new Topology(testName);
    opParamAdder.init(topology, getConfig());

    StreamSchema schema = Type.Factory.getStreamSchema(
            "tuple<"
            + "rstring r"
            + ", optional<rstring> orv"
            + ", optional<rstring> ornv"
            + ", int32 i32"
            + ", optional<int32> oi32v"
            + ", optional<int32> oi32nv"
            + " >");

    Map<String,Object> expectedValues = new HashMap<>();

    Random rand = new Random();
    String r = "test X\tY\"Lit\nerals\\nX\\tY " + rand.nextInt();
    opParamAdder.put("r", r);
    expectedValues.put("r", new RString(r));

    String orv = "test X\tY\"Lit\nerals\\nX\\tY " + rand.nextInt();
    opParamAdder.put("orv", orv);
    // test setting optional type to null by using null in Map
    opParamAdder.put("ornv", null);
    expectedValues.put("orv", new RString(orv));
    expectedValues.put("ornv", null);

    int i32 = rand.nextInt();
    opParamAdder.put("i32", i32);
    int oi32v = rand.nextInt();
    opParamAdder.put("oi32v", oi32v);
    // test setting optional type to null by using createNullValue() in Map
    opParamAdder.put("oi32nv", SPL.createNullValue());
    expectedValues.put("i32", i32);
    expectedValues.put("oi32v", oi32v);
    expectedValues.put("oi32nv", null);

    // Optional types need the testtkopt toolkit, not testtk.
    SPL.addToolkit(topology, new File(getTestRoot(), "spl/testtkopt"));
    SPLStream paramTuple = SPL.invokeSource(topology, "testgen::TypeLiteralTester", opParamAdder.getParams(), schema);

    Tester tester = topology.getTester();
    Condition<Long> expectedCount = tester.tupleCount(paramTuple, 1);
    Condition<?> contents = tester.tupleContents(paramTuple, schema.getTuple(expectedValues));

    complete(tester, expectedCount.and(contents), 10, TimeUnit.SECONDS);

    assertTrue(contents.valid());
    assertTrue(expectedCount.valid());
}
@Test
public void testParamLiteralsOptionalTypes() throws Exception {
    // Test operator parameters with literal values for optional types.
    // Skipped (assumption failure) when the runtime lacks optional-type support.
    assumeOptionalTypes();
    testOpParamsOptionalTypes("testParamLiteralsOptionalTypes", new OpParamAdder());
}
@Test
public void testSubmissionParamsWithDefault() throws Exception {
    // Test operator parameters with submission time values with defaults.
    // Each value becomes a submission parameter whose default is the value
    // itself, so no explicit SUBMISSION_PARAMS config entry is needed.
    testOpParams("testSubmissionParamsWithDefault", new OpParamAdder() {
        void put(String opParamName, Object opParamValue) {
            Supplier<?> sp;
            // SPL.createValue(...) results are JsonObjects and must go
            // through SPL.createSubmissionParameter; plain Java values use
            // the Topology-level factory.
            if (!(opParamValue instanceof JsonObject))
                sp = top.createSubmissionParameter(opParamName, opParamValue);
            else
                sp = SPL.createSubmissionParameter(top, opParamName, opParamValue, true);
            params.put(opParamName, sp);
        }
    });
}
@Test
public void testSubmissionParamsWithoutDefault() throws Exception {
    // Test operator parameters with submission time values without defaults.
    // With no defaults, every value must also be supplied at submission
    // time via the SUBMISSION_PARAMS context property.
    testOpParams("testSubmissionParamsWithoutDefault", new OpParamAdder() {
        void put(String opParamName, Object opParamValue) {
            Supplier<?> sp;
            if (!(opParamValue instanceof JsonObject))
                sp = top.createSubmissionParameter(opParamName,
                        (Class<?>)opParamValue.getClass());
            else
                sp = SPL.createSubmissionParameter(top, opParamName, opParamValue, false);
            params.put(opParamName, sp);

            // Record the actual value in the shared config, creating the
            // SUBMISSION_PARAMS map on first use.
            @SuppressWarnings("unchecked")
            Map<String,Object> submitParams = (Map<String,Object>) config.get(ContextProperties.SUBMISSION_PARAMS);
            if (submitParams == null) {
                submitParams = new HashMap<>();
                config.put(ContextProperties.SUBMISSION_PARAMS, submitParams);
            }
            // Wrapped SPL values are submitted in their string form.
            if (!(opParamValue instanceof JsonObject))
                submitParams.put(opParamName, opParamValue);
            else
                submitParams.put(opParamName, pvToStr((JsonObject)opParamValue));
        }
    });
}
/**
 * Extract the string form of a wrapped SPL value — the JSON object
 * produced by {@code SPL.createValue(...)}.
 *
 * A Client of the API shouldn't find itself in
 * a place to need this. It's just an artifact of
 * the way these tests are composed plus lack of a
 * public form of valueToString(SPL.createValue(...)).
 *
 * @param jo wrapper of the form {"type":"__spl_value","value":{"metaType":...,"value":...}}
 * @return the wrapped value as a string
 * @throws IllegalArgumentException if {@code jo} is not an __spl_value wrapper
 */
private String pvToStr(JsonObject jo) {
    String type = jo.get("type").getAsString();
    if (!"__spl_value".equals(type))
        throw new IllegalArgumentException("jo " + jo);
    JsonObject value = jo.get("value").getAsJsonObject();
    // Fixed: removed the unused local that read (and discarded) "metaType".
    JsonElement v = value.get("value");
    return v.getAsString();
}
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.rule.builder.dialect.java;
import org.drools.compiler.compiler.BoundIdentifiers;
import org.drools.compiler.compiler.DescrBuildError;
import org.drools.compiler.lang.descr.BaseDescr;
import org.drools.compiler.lang.descr.RuleDescr;
import org.drools.compiler.rule.builder.RuleBuildContext;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.reteoo.RuleTerminalNode;
import org.drools.core.rule.Declaration;
import org.drools.core.rule.JavaDialectRuntimeData;
import org.drools.core.spi.AcceptsClassObjectType;
import org.drools.core.spi.DeclarationScopeResolver;
import org.drools.core.spi.KnowledgeHelper;
import org.drools.core.util.StringUtils;
import org.mvel2.integration.impl.MapVariableResolverFactory;
import org.mvel2.templates.SimpleTemplateRegistry;
import org.mvel2.templates.TemplateCompiler;
import org.mvel2.templates.TemplateRegistry;
import org.mvel2.templates.TemplateRuntime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Static helper used by the Java dialect rule builders to generate
 * consequence method and invoker source code from MVEL templates.
 *
 * <p>Template registries are lazily initialized (and reset when a
 * template name is changed); access to them is synchronized.
 */
public final class JavaRuleBuilderHelper {

    private static final Logger logger = LoggerFactory.getLogger(JavaRuleBuilderHelper.class);

    protected static TemplateRegistry RULE_REGISTRY = new SimpleTemplateRegistry();
    protected static TemplateRegistry INVOKER_REGISTRY = new SimpleTemplateRegistry();

    protected static String JAVA_RULE_MVEL = "javaRule.mvel";
    protected static String JAVA_INVOKERS_MVEL = "javaInvokers.mvel";

    /** Utility class with only static members: not instantiable. */
    private JavaRuleBuilderHelper() {
    }

    /** Switch to a different consequence template and force re-compilation. */
    public static void setConsequenceTemplate( String name ) {
        JAVA_RULE_MVEL = name;
        RULE_REGISTRY = new SimpleTemplateRegistry();
    }

    /** Switch to a different invoker template and force re-compilation. */
    public static void setInvokerTemplate( String name ) {
        JAVA_INVOKERS_MVEL = name;
        INVOKER_REGISTRY = new SimpleTemplateRegistry();
    }

    /**
     * Lazily compile and return the rule template registry.
     * The {@code cl} parameter is unused but kept for API compatibility.
     */
    public static synchronized TemplateRegistry getRuleTemplateRegistry(ClassLoader cl) {
        if ( !RULE_REGISTRY.contains( "rules" ) ) {
            // NOTE(review): getResourceAsStream returns null if the template
            // resource is missing, which would surface as an NPE here.
            InputStream javaRuleMvelStream = JavaRuleBuilderHelper.class.getResourceAsStream( JAVA_RULE_MVEL );
            RULE_REGISTRY.addNamedTemplate( "rules",
                                            TemplateCompiler.compileTemplate( javaRuleMvelStream ) );
            try {
                javaRuleMvelStream.close();
            } catch ( IOException ex ) {
                logger.debug( "Failed to close stream!", ex );
            }

            // Pre-execute once so nested template definitions are registered.
            TemplateRuntime.execute( RULE_REGISTRY.getNamedTemplate( "rules" ),
                                     null,
                                     RULE_REGISTRY );
        }

        return RULE_REGISTRY;
    }

    /**
     * Lazily compile and return the invoker template registry.
     * The {@code cl} parameter is unused but kept for API compatibility.
     */
    public static synchronized TemplateRegistry getInvokerTemplateRegistry(ClassLoader cl) {
        if ( !INVOKER_REGISTRY.contains( "invokers" ) ) {
            InputStream javaInvokersMvelStream = JavaRuleBuilderHelper.class.getResourceAsStream( JAVA_INVOKERS_MVEL );
            INVOKER_REGISTRY.addNamedTemplate( "invokers",
                                              TemplateCompiler.compileTemplate( javaInvokersMvelStream ) );
            try {
                javaInvokersMvelStream.close();
            } catch ( IOException ex ) {
                logger.debug( "Failed to close stream!", ex );
            }

            TemplateRuntime.execute( INVOKER_REGISTRY.getNamedTemplate( "invokers" ),
                                     null,
                                     INVOKER_REGISTRY );
        }

        return INVOKER_REGISTRY;
    }

    /**
     * Analyze the (possibly named) consequence block of the rule being
     * built, resolving identifiers against the given declarations,
     * globals and the KnowledgeHelper API.
     */
    public static JavaAnalysisResult createJavaAnalysisResult(final RuleBuildContext context,
                                                              String consequenceName,
                                                              Map<String, Declaration> decls) {
        final RuleDescr ruleDescr = context.getRuleDescr();

        BoundIdentifiers bindings = new BoundIdentifiers(DeclarationScopeResolver.getDeclarationClasses( decls ),
                                                         context.getKnowledgeBuilder().getGlobals(),
                                                         null,
                                                         KnowledgeHelper.class );

        String consequenceStr = ( RuleImpl.DEFAULT_CONSEQUENCE_NAME.equals( consequenceName ) ) ?
                (String) ruleDescr.getConsequence() :
                (String) ruleDescr.getNamedConsequences().get( consequenceName );
        // Trailing newline guarantees the last statement is terminated for the analyzer.
        consequenceStr = consequenceStr + "\n";

        return ( JavaAnalysisResult) context.getDialect().analyzeBlock( context,
                                                                        ruleDescr,
                                                                        consequenceStr,
                                                                        bindings );
    }

    /**
     * Build the template variable map for a consequence, recording the
     * declarations the consequence actually uses (in terminal-node sort
     * order) on the rule.
     */
    public static Map<String, Object> createConsequenceContext(final RuleBuildContext context,
                                                               String consequenceName,
                                                               String className,
                                                               String consequenceText,
                                                               Map<String, Declaration> decls,
                                                               final BoundIdentifiers usedIdentifiers) {

        // Collect only the declarations referenced by the consequence.
        final Declaration[] declarations = new Declaration[usedIdentifiers.getDeclrClasses().size()];
        String[] declrStr = new String[declarations.length];
        int j = 0;
        for (String str : usedIdentifiers.getDeclrClasses().keySet() ) {
            declrStr[j] = str;
            declarations[j++] = decls.get( str );
        }
        Arrays.sort( declarations, RuleTerminalNode.SortDeclarations.instance );
        // Re-derive the identifier list so it matches the sorted order.
        for ( int i = 0; i < declrStr.length; i++) {
            declrStr[i] = declarations[i].getIdentifier();
        }
        context.getRule().setRequiredDeclarationsForConsequence( consequenceName, declrStr );
        final Map<String, Object> map = createVariableContext( className,
                                                               consequenceText,
                                                               context,
                                                               declarations,
                                                               null,
                                                               usedIdentifiers.getGlobals()
        );

        map.put( "consequenceName", consequenceName );

        //final int[] indexes = new int[declarations.length];
        final Integer[] indexes = new Integer[declarations.length];

        final Boolean[] notPatterns = new Boolean[declarations.length];
        for ( int i = 0, length = declarations.length; i < length; i++ ) {
            indexes[i] = i;
            notPatterns[i] = (declarations[i].getExtractor() instanceof AcceptsClassObjectType) ? Boolean.FALSE : Boolean.TRUE ;
            // NOTE(review): indexes[i] was just assigned i (>= 0), so this
            // error branch is unreachable; kept to preserve behavior.
            if (indexes[i] == -1 ) {
                context.addError( new DescrBuildError( context.getParentDescr(),
                                                       context.getRuleDescr(),
                                                       null,
                                                       "Internal Error : Unable to find declaration in list while generating the consequence invoker" ) );
            }
        }

        map.put( "indexes",
                 indexes );

        map.put( "notPatterns",
                 notPatterns );

        return map;
    }

    /**
     * Build the common template variable map (class/package names, source
     * text and hash, declarations and globals) shared by the rule and
     * invoker templates.
     */
    public static Map<String, Object> createVariableContext(final String className,
                                                            final String text,
                                                            final RuleBuildContext context,
                                                            final Declaration[] declarations,
                                                            final Declaration[] localDeclarations,
                                                            final Map<String, Class<?>> globals) {
        final Map<String, Object> map = new HashMap<String, Object>();

        map.put( "className",
                 className );

        map.put( "methodName",
                 className );

        map.put( "package",
                 context.getPkg().getName() );

        map.put( "ruleClassName",
                 StringUtils.ucFirst( context.getRuleDescr().getClassName() ) );

        map.put( "invokerClassName",
                 context.getRuleDescr().getClassName() + StringUtils.ucFirst( className ) + "Invoker" );

        if ( text != null ) {
            map.put( "text",
                     text );

            map.put( "hashCode",
                     text.hashCode());
        }

        map.put( "declarations",
                 declarations );

        if ( localDeclarations != null ) {
            map.put( "localDeclarations",
                     localDeclarations );
        }

        // Inner-class names use '.' rather than '$' in generated source.
        String[] globalStr = new String[globals.size()];
        String[] globalTypes = new String[globals.size()];
        int i = 0;
        for ( Entry<String, Class< ? >> entry : globals.entrySet() ) {
            globalStr[i] = entry.getKey();
            globalTypes[i] = entry.getValue().getName().replace( '$',
                                                                 '.' );
            i++;
        }

        map.put( "globals",
                 globalStr );

        map.put( "globalTypes",
                 globalTypes );

        return map;
    }

    /** Generate both the consequence method and its invoker from templates. */
    public static void generateTemplates(final String ruleTemplate,
                                         final String invokerTemplate,
                                         final RuleBuildContext context,
                                         final String className,
                                         final Map vars,
                                         final Object invokerLookup,
                                         final BaseDescr descrLookup) {
        generateMethodTemplate(ruleTemplate, context, vars);
        generateInvokerTemplate(invokerTemplate, context, className, vars, invokerLookup, descrLookup);
    }

    /** Render the named rule template and register the result as a method on the context. */
    public static void generateMethodTemplate(final String ruleTemplate, final RuleBuildContext context, final Map vars) {
        TemplateRegistry registry = getRuleTemplateRegistry(context.getKnowledgeBuilder().getRootClassLoader());

        context.addMethod((String) TemplateRuntime.execute( registry.getNamedTemplate(ruleTemplate),
                                                            null,
                                                            new MapVariableResolverFactory(vars),
                                                            registry) );
    }

    /**
     * Render the named invoker template and register the generated source
     * plus its lookup objects under the fully qualified invoker class name.
     */
    public static void generateInvokerTemplate(final String invokerTemplate,
                                               final RuleBuildContext context,
                                               final String className,
                                               final Map vars,
                                               final Object invokerLookup,
                                               final BaseDescr descrLookup) {
        TemplateRegistry registry = getInvokerTemplateRegistry(context.getKnowledgeBuilder().getRootClassLoader());
        final String invokerClassName = context.getPkg().getName() + "." + context.getRuleDescr().getClassName() + StringUtils.ucFirst( className ) + "Invoker";

        context.getInvokers().put( invokerClassName,
                                   (String) TemplateRuntime.execute( registry.getNamedTemplate( invokerTemplate ),
                                                                     null,
                                                                     new MapVariableResolverFactory( vars ),
                                                                     registry ) );

        context.getInvokerLookups().put( invokerClassName,
                                         invokerLookup );
        context.getDescrLookups().put( invokerClassName,
                                       descrLookup );
    }

    /**
     * Register pre-compiled invoker bytecode (instead of generated source)
     * with the Java dialect runtime data.
     */
    public static void registerInvokerBytecode(RuleBuildContext context, Map<String, Object> vars, byte[] bytecode, Object invokerLookup) {
        String packageName = (String)vars.get("package");
        String invokerClassName = (String)vars.get("invokerClassName");
        String className = packageName + "." + invokerClassName;
        String resourceName = className.replace('.', '/') + ".class";

        JavaDialectRuntimeData data = (JavaDialectRuntimeData)context.getPkg().getDialectRuntimeRegistry().getDialectData("java");
        data.write(resourceName, bytecode);
        data.putInvoker(className, invokerLookup);
    }
}
| |
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.autofill_assistant;
import android.animation.ValueAnimator;
import android.view.animation.DecelerateInterpolator;
import androidx.annotation.Nullable;
import org.chromium.base.Callback;
import org.chromium.base.MathUtils;
import org.chromium.components.browser_ui.bottomsheet.BottomSheetController;
import org.chromium.components.browser_ui.bottomsheet.BottomSheetController.SheetState;
import org.chromium.components.browser_ui.bottomsheet.BottomSheetObserver;
import org.chromium.components.browser_ui.bottomsheet.EmptyBottomSheetObserver;
import org.chromium.content_public.browser.GestureStateListenerWithScroll;
/**
 * A Gesture listener that implements scroll-to-hide for the assistant bottomsheet when in FULL
 * state.
 *
 * <p>While the user scrolls the page, the sheet offset follows the scroll delta; when the
 * scroll ends between the two extremes, the sheet animates to fully shown or fully hidden.
 */
public class ScrollToHideGestureListener implements GestureStateListenerWithScroll {
    /** Base duration of the animation of the sheet. 218 ms is a spec for material design. */
    private static final int BASE_ANIMATION_DURATION_MS = 218;

    private final BottomSheetController mBottomSheetController;
    private final AssistantBottomSheetContent mContent;

    // Fixed: dropped the incorrect @Nullable annotation; this field is final
    // and always initialized, never null.
    private final BottomSheetObserver mStateChangeTracker = new StateChangeTracker();

    private boolean mScrolling;

    /** Remembers the last value of scroll offset, to compute the delta for the next move. */
    private int mLastScrollOffsetY;

    /**
     * A capture of {@code mBottomSheetController.getCurrentOffset()}. At the end of a scroll, it is
     * compared with the current value to figure out whether the sheet was overall scrolled up or
     * down.
     */
    private float mOffsetMarkPx;

    /** This animator moves the sheet to its final position after scrolling ended. */
    private ValueAnimator mAnimator;

    /**
     * The offset the animator is moving towards. Only relevant when {@code mAnimator} is active.
     */
    private int mAnimatorGoalOffsetPx;

    public ScrollToHideGestureListener(
            BottomSheetController bottomSheetController, AssistantBottomSheetContent content) {
        mBottomSheetController = bottomSheetController;
        mContent = content;
    }

    /** True while scrolling. */
    public boolean isScrolling() {
        return mScrolling;
    }

    /** True if the sheet was hidden. */
    public boolean isSheetHidden() {
        return mBottomSheetController.getSheetState() == SheetState.FULL
                && mBottomSheetController.getCurrentOffset() == 0;
    }

    /** True if the sheet is currently hiding or expanding after a scroll. */
    public boolean isSheetSettling() {
        return mBottomSheetController.getSheetState() == SheetState.FULL && mAnimator != null
                && mAnimator.isStarted();
    }

    @Override
    public void onScrollStarted(int scrollOffsetY, int scrollExtentY) {
        Callback<Integer> offsetController = mContent.getOffsetController();
        if (offsetController == null) return;

        // Scroll to hide only applies if the sheet is fully opened, and state is FULL or is being
        // opened, and target state is FULL.
        if (mBottomSheetController.getTargetSheetState() == SheetState.FULL) {
            // This stops animation and freezes the sheet in place.
            offsetController.onResult(mBottomSheetController.getCurrentOffset());
        }
        if (mBottomSheetController.getSheetState() != SheetState.FULL) return;

        resetScrollingState(); // also cancels any running animations
        mScrolling = true;
        mLastScrollOffsetY = scrollOffsetY;
        mOffsetMarkPx = mBottomSheetController.getCurrentOffset();
        mBottomSheetController.addObserver(mStateChangeTracker);
    }

    @Override
    public void onScrollEnded(int scrollOffsetY, int scrollExtentY) {
        // Apply the final delta before deciding where to settle.
        onScrollOffsetOrExtentChanged(scrollOffsetY, scrollExtentY);
        if (!mScrolling) return;

        resetScrollingState();

        int maxOffsetPx = getMaxOffsetPx();
        int currentOffsetPx = mBottomSheetController.getCurrentOffset();
        if (currentOffsetPx == 0 || currentOffsetPx == maxOffsetPx) {
            // Already fully hidden or fully shown; nothing to settle.
            return;
        }
        // Settle towards fully shown when the sheet moved up relative to the
        // mark, or when the page is scrolled back to the top.
        if (currentOffsetPx >= mOffsetMarkPx || scrollOffsetY == 0) {
            animateTowards(maxOffsetPx);
        } else {
            animateTowards(0);
        }
    }

    @Override
    public void onScrollOffsetOrExtentChanged(int scrollOffsetY, int scrollExtentY) {
        if (!mScrolling) {
            // It's possible for the scroll offset to reset to 0 outside of a scroll, if the page or
            // viewport size change. Scrolling up is not possible so if the sheet is hidden or about
            // to be hidden, show it.
            if (scrollOffsetY == 0
                    && (isSheetHidden() || (isSheetSettling() && mAnimatorGoalOffsetPx == 0))) {
                animateTowards(getMaxOffsetPx());
            }
            return;
        }

        Callback<Integer> offsetController = mContent.getOffsetController();
        if (offsetController == null) {
            resetScrollingState();
            return;
        }

        // deltaPx is the value to add to the current sheet offset (height). It is negative when
        // scrolling down, that is, when scrollOffsetY increases.
        int deltaPx = mLastScrollOffsetY - scrollOffsetY;
        mLastScrollOffsetY = scrollOffsetY;

        int maxOffsetPx = getMaxOffsetPx();
        int offsetPx = MathUtils.clamp(
                mBottomSheetController.getCurrentOffset() + deltaPx, 0, maxOffsetPx);
        offsetController.onResult(offsetPx);

        // If either extremes were reached, update the mark. The decision to fully show or hide will
        // be relative to that point.
        if (offsetPx == 0) {
            mOffsetMarkPx = 0;
        } else if (offsetPx >= maxOffsetPx) {
            mOffsetMarkPx = maxOffsetPx;
        }
    }

    @Override
    public void onFlingStartGesture(int scrollOffsetY, int scrollExtentY) {
        // Flinging and scrolling are handled the same, the sheet follows the movement of the
        // browser page.
        onScrollStarted(scrollOffsetY, scrollExtentY);
    }

    @Override
    public void onFlingEndGesture(int scrollOffsetY, int scrollExtentY) {
        onScrollEnded(scrollOffsetY, scrollExtentY);
    }

    @Override
    public void onDestroyed() {
        resetScrollingState();
    }

    /** Height of the sheet content, used as the fully-shown offset. */
    private int getMaxOffsetPx() {
        return mContent.getContentView().getHeight();
    }

    private void resetScrollingState() {
        mScrolling = false;
        mLastScrollOffsetY = 0;
        cancelAnimation();
        mBottomSheetController.removeObserver(mStateChangeTracker);
    }

    private void cancelAnimation() {
        if (mAnimator == null) return;

        mAnimator.cancel();
        mAnimator = null;
    }

    /** Animate the sheet towards {@code goalOffsetPx} without changing its state. */
    private void animateTowards(int goalOffsetPx) {
        Callback<Integer> offsetController = mContent.getOffsetController();
        if (offsetController == null) return;

        ValueAnimator animator =
                ValueAnimator.ofInt(mBottomSheetController.getCurrentOffset(), goalOffsetPx);
        animator.setDuration(BASE_ANIMATION_DURATION_MS);
        animator.setInterpolator(new DecelerateInterpolator(1.0f));
        animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animator) {
                // Ignore updates from a stale animator that was replaced.
                if (mAnimator != animator) return;

                offsetController.onResult((Integer) animator.getAnimatedValue());
            }
        });
        mAnimator = animator;
        mAnimatorGoalOffsetPx = goalOffsetPx;
        mAnimator.start();
    }

    /** Stop scrolling if the sheet leaves the FULL state during scrolling. */
    private class StateChangeTracker extends EmptyBottomSheetObserver {
        @Override
        public void onSheetStateChanged(@SheetState int newState, int reason) {
            if (newState != SheetState.FULL) {
                resetScrollingState();
            }
        }
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.customerinsights.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.time.OffsetDateTime;
import java.util.List;
import java.util.Map;
/** Describes an entity. */
@Fluent
public class EntityTypeDefinition extends MetadataDefinitionBase {
@JsonIgnore private final ClientLogger logger = new ClientLogger(EntityTypeDefinition.class);
/*
* The api entity set name. This becomes the odata entity set name for the
* entity Type being referred in this object.
*/
@JsonProperty(value = "apiEntitySetName")
private String apiEntitySetName;
/*
* Type of entity.
*/
@JsonProperty(value = "entityType")
private EntityTypes entityType;
/*
* The properties of the Profile.
*/
@JsonProperty(value = "fields")
private List<PropertyDefinition> fields;
/*
* The instance count.
*/
@JsonProperty(value = "instancesCount")
private Integer instancesCount;
/*
* The last changed time for the type definition.
*/
@JsonProperty(value = "lastChangedUtc", access = JsonProperty.Access.WRITE_ONLY)
private OffsetDateTime lastChangedUtc;
/*
* Provisioning state.
*/
@JsonProperty(value = "provisioningState", access = JsonProperty.Access.WRITE_ONLY)
private ProvisioningStates provisioningState;
/*
* The schema org link. This helps ACI identify and suggest semantic
* models.
*/
@JsonProperty(value = "schemaItemTypeLink")
private String schemaItemTypeLink;
/*
* The hub name.
*/
@JsonProperty(value = "tenantId", access = JsonProperty.Access.WRITE_ONLY)
private String tenantId;
/*
* The timestamp property name. Represents the time when the interaction or
* profile update happened.
*/
@JsonProperty(value = "timestampFieldName")
private String timestampFieldName;
/*
* The name of the entity.
*/
@JsonProperty(value = "typeName")
private String typeName;
/**
* Get the apiEntitySetName property: The api entity set name. This becomes the odata entity set name for the entity
* Type being referred in this object.
*
* @return the apiEntitySetName value.
*/
public String apiEntitySetName() {
return this.apiEntitySetName;
}
/**
* Set the apiEntitySetName property: The api entity set name. This becomes the odata entity set name for the entity
* Type being referred in this object.
*
* @param apiEntitySetName the apiEntitySetName value to set.
* @return the EntityTypeDefinition object itself.
*/
public EntityTypeDefinition withApiEntitySetName(String apiEntitySetName) {
this.apiEntitySetName = apiEntitySetName;
return this;
}
/**
* Get the entityType property: Type of entity.
*
* @return the entityType value.
*/
public EntityTypes entityType() {
return this.entityType;
}
/**
* Set the entityType property: Type of entity.
*
* @param entityType the entityType value to set.
* @return the EntityTypeDefinition object itself.
*/
public EntityTypeDefinition withEntityType(EntityTypes entityType) {
this.entityType = entityType;
return this;
}
/**
* Get the fields property: The properties of the Profile.
*
* @return the fields value.
*/
public List<PropertyDefinition> fields() {
return this.fields;
}
/**
* Set the fields property: The properties of the Profile.
*
* @param fields the fields value to set.
* @return the EntityTypeDefinition object itself.
*/
public EntityTypeDefinition withFields(List<PropertyDefinition> fields) {
this.fields = fields;
return this;
}
/**
* Get the instancesCount property: The instance count.
*
* @return the instancesCount value.
*/
public Integer instancesCount() {
return this.instancesCount;
}
/**
* Set the instancesCount property: The instance count.
*
* @param instancesCount the instancesCount value to set.
* @return the EntityTypeDefinition object itself.
*/
public EntityTypeDefinition withInstancesCount(Integer instancesCount) {
this.instancesCount = instancesCount;
return this;
}
/**
* Get the lastChangedUtc property: The last changed time for the type definition.
*
* @return the lastChangedUtc value.
*/
public OffsetDateTime lastChangedUtc() {
return this.lastChangedUtc;
}
/**
* Get the provisioningState property: Provisioning state.
*
* @return the provisioningState value.
*/
public ProvisioningStates provisioningState() {
return this.provisioningState;
}
/**
* Get the schemaItemTypeLink property: The schema org link. This helps ACI identify and suggest semantic models.
*
* @return the schemaItemTypeLink value.
*/
public String schemaItemTypeLink() {
return this.schemaItemTypeLink;
}
/**
* Set the schemaItemTypeLink property: The schema org link. This helps ACI identify and suggest semantic models.
*
* @param schemaItemTypeLink the schemaItemTypeLink value to set.
* @return the EntityTypeDefinition object itself.
*/
public EntityTypeDefinition withSchemaItemTypeLink(String schemaItemTypeLink) {
this.schemaItemTypeLink = schemaItemTypeLink;
return this;
}
/**
* Get the tenantId property: The hub name.
*
* @return the tenantId value.
*/
public String tenantId() {
return this.tenantId;
}
/**
* Get the timestampFieldName property: The timestamp property name. Represents the time when the interaction or
* profile update happened.
*
* @return the timestampFieldName value.
*/
public String timestampFieldName() {
return this.timestampFieldName;
}
/**
* Set the timestampFieldName property: The timestamp property name. Represents the time when the interaction or
* profile update happened.
*
* @param timestampFieldName the timestampFieldName value to set.
* @return the EntityTypeDefinition object itself.
*/
public EntityTypeDefinition withTimestampFieldName(String timestampFieldName) {
this.timestampFieldName = timestampFieldName;
return this;
}
/**
* Get the typeName property: The name of the entity.
*
* @return the typeName value.
*/
public String typeName() {
return this.typeName;
}
/**
* Set the typeName property: The name of the entity.
*
* @param typeName the typeName value to set.
* @return the EntityTypeDefinition object itself.
*/
public EntityTypeDefinition withTypeName(String typeName) {
this.typeName = typeName;
return this;
}
/** {@inheritDoc} */
@Override
public EntityTypeDefinition withAttributes(Map<String, List<String>> attributes) {
super.withAttributes(attributes);
return this;
}
/** {@inheritDoc} */
@Override
public EntityTypeDefinition withDescription(Map<String, String> description) {
super.withDescription(description);
return this;
}
/** {@inheritDoc} */
@Override
public EntityTypeDefinition withDisplayName(Map<String, String> displayName) {
super.withDisplayName(displayName);
return this;
}
/** {@inheritDoc} */
@Override
public EntityTypeDefinition withLocalizedAttributes(Map<String, Map<String, String>> localizedAttributes) {
super.withLocalizedAttributes(localizedAttributes);
return this;
}
/** {@inheritDoc} */
@Override
public EntityTypeDefinition withSmallImage(String smallImage) {
super.withSmallImage(smallImage);
return this;
}
/**
 * {@inheritDoc}
 *
 * <p>Overridden only to narrow the return type to {@code EntityTypeDefinition} so fluent
 * call chains keep the subclass type.</p>
 */
@Override
public EntityTypeDefinition withMediumImage(String mediumImage) {
    super.withMediumImage(mediumImage);
    return this;
}
/**
 * {@inheritDoc}
 *
 * <p>Overridden only to narrow the return type to {@code EntityTypeDefinition} so fluent
 * call chains keep the subclass type.</p>
 */
@Override
public EntityTypeDefinition withLargeImage(String largeImage) {
    super.withLargeImage(largeImage);
    return this;
}
/**
 * Validates the instance: runs the parent type's validation first, then validates every
 * nested field definition (when any are present).
 *
 * @throws IllegalArgumentException thrown if the instance is not valid.
 */
@Override
public void validate() {
    super.validate();
    if (fields() == null) {
        return;
    }
    fields().forEach(field -> field.validate());
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* @author max
*/
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeHighlighting.Pass;
import com.intellij.codeHighlighting.TextEditorHighlightingPass;
import com.intellij.codeInsight.daemon.*;
import com.intellij.codeInsight.daemon.impl.analysis.HighlightingLevelManager;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.lang.Language;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.colors.CodeInsightColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.markup.GutterIconRenderer;
import com.intellij.openapi.editor.markup.SeparatorPlacement;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.FileViewProvider;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.source.tree.injected.InjectedFileViewProvider;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.util.FunctionUtil;
import com.intellij.util.PairConsumer;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.NotNullList;
import gnu.trove.THashSet;
import gnu.trove.TIntObjectHashMap;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.util.*;
/**
 * Daemon highlighting pass that collects gutter line markers ({@link LineMarkerInfo}) for a file
 * and applies them to the editor. Runs marker providers over the elements inside the priority
 * range first (applying those incrementally on the EDT) and over the remaining elements second.
 * Marked {@link DumbAware} so it also runs while indices are being built.
 */
public class LineMarkersPass extends TextEditorHighlightingPass implements DumbAware {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInsight.daemon.impl.LineMarkersPass");

  // Result of doCollectInformation(); read later by doApplyInformationToEditor().
  // volatile because collection and application may happen on different threads.
  private volatile List<LineMarkerInfo> myMarkers = Collections.emptyList();

  @NotNull private final PsiFile myFile;
  // Range whose elements are processed first (and applied incrementally).
  @NotNull private final TextRange myPriorityBounds;
  // Overall range this pass is responsible for.
  @NotNull private final TextRange myRestrictRange;

  LineMarkersPass(@NotNull Project project,
                  @NotNull PsiFile file,
                  @NotNull Document document,
                  @NotNull TextRange priorityBounds,
                  @NotNull TextRange restrictRange) {
    super(project, document, false);
    myFile = file;
    myPriorityBounds = priorityBounds;
    myRestrictRange = restrictRange;
  }

  @NotNull
  @Override
  public Document getDocument() {
    // The document passed to the constructor is @NotNull, so super's nullable result is safe here.
    //noinspection ConstantConditions
    return super.getDocument();
  }

  @Override
  public void doApplyInformationToEditor() {
    try {
      LineMarkersUtil.setLineMarkersToEditor(myProject, getDocument(), myRestrictRange, myMarkers, getId());
    }
    catch (IndexNotReadyException ignored) {
      // Indices may still be loading; markers will be recomputed on a later pass.
    }
  }

  @Override
  public void doCollectInformation(@NotNull ProgressIndicator progress) {
    final List<LineMarkerInfo> lineMarkers = new ArrayList<>();
    FileViewProvider viewProvider = myFile.getViewProvider();
    // A file may contain several language roots (e.g. templates); process each one.
    for (Language language : viewProvider.getLanguages()) {
      final PsiFile root = viewProvider.getPsi(language);
      HighlightingLevelManager highlightingLevelManager = HighlightingLevelManager.getInstance(myProject);
      if (!highlightingLevelManager.shouldHighlight(root)) continue;
      Divider.divideInsideAndOutsideInOneRoot(root, myRestrictRange, myPriorityBounds,
        elements -> {
          Collection<LineMarkerProvider> providers = getMarkerProviders(language, myProject);
          List<LineMarkerProvider> providersList = new ArrayList<>(providers);
          // "Inside" (priority) elements: collect and additionally push each marker to the
          // editor immediately on the EDT for faster perceived feedback.
          queryProviders(elements.inside, root, providersList, (element, info) -> {
            lineMarkers.add(info);
            ApplicationManager.getApplication().invokeLater(() -> {
              if (isValid()) {
                LineMarkersUtil.addLineMarkerToEditorIncrementally(myProject, getDocument(), info);
              }
            }, myProject.getDisposed());
          });
          // "Outside" elements: just collect; they are applied in doApplyInformationToEditor().
          queryProviders(elements.outside, root, providersList, (element, info) -> lineMarkers.add(info));
          return true;
        });
    }
    myMarkers = mergeLineMarkers(lineMarkers, getDocument());
    if (LOG.isDebugEnabled()) {
      LOG.debug("LineMarkersPass.doCollectInformation. lineMarkers: " + lineMarkers+"; merged: "+myMarkers);
    }
  }

  /**
   * Extracts all {@link MergeableLineMarkerInfo}s from {@code markers}, groups them by document
   * line and replaces each group with the result of {@link MergeableLineMarkerInfo#merge}.
   * Non-mergeable markers pass through unchanged. Note: mutates {@code markers} (removes the
   * mergeable entries).
   */
  @NotNull
  private static List<LineMarkerInfo> mergeLineMarkers(@NotNull List<LineMarkerInfo> markers, @NotNull Document document) {
    List<MergeableLineMarkerInfo> forMerge = new ArrayList<>();
    TIntObjectHashMap<List<MergeableLineMarkerInfo>> sameLineMarkers = new TIntObjectHashMap<>();
    // Iterate backwards so remove(i) does not shift not-yet-visited indices.
    for (int i = markers.size() - 1; i >= 0; i--) {
      LineMarkerInfo marker = markers.get(i);
      if (marker instanceof MergeableLineMarkerInfo) {
        MergeableLineMarkerInfo mergeable = (MergeableLineMarkerInfo)marker;
        forMerge.add(mergeable);
        markers.remove(i);
        int line = document.getLineNumber(marker.startOffset);
        List<MergeableLineMarkerInfo> infos = sameLineMarkers.get(line);
        if (infos == null) {
          infos = new ArrayList<>();
          sameLineMarkers.put(line, infos);
        }
        infos.add(mergeable);
      }
    }
    if (forMerge.isEmpty()) return markers;
    List<LineMarkerInfo> result = new ArrayList<>(markers);
    for (Object v : sameLineMarkers.getValues()) {
      List<MergeableLineMarkerInfo> infos = (List<MergeableLineMarkerInfo>)v;
      result.addAll(MergeableLineMarkerInfo.merge(infos));
    }
    return result;
  }

  /**
   * Returns the providers registered for {@code language}, filtered down to dumb-aware ones
   * when indexing, and to those enabled in the line-marker settings.
   */
  @NotNull
  public static List<LineMarkerProvider> getMarkerProviders(@NotNull Language language, @NotNull final Project project) {
    List<LineMarkerProvider> forLanguage = LineMarkerProviders.INSTANCE.allForLanguageOrAny(language);
    List<LineMarkerProvider> providers = DumbService.getInstance(project).filterByDumbAwareness(forLanguage);
    final LineMarkerSettings settings = LineMarkerSettings.getSettings();
    return ContainerUtil.filter(providers, provider -> !(provider instanceof LineMarkerProviderDescriptor)
                                                       || settings.isEnabled((LineMarkerProviderDescriptor)provider));
  }

  /**
   * Runs each provider over each element (fast path), then gives every provider one batched
   * {@code collectSlowLineMarkers} call over all elements (slow path). Injected fragments found
   * inside the elements are processed recursively. Results are reported through {@code consumer}.
   * Must be called under a read action.
   */
  private static void queryProviders(@NotNull List<PsiElement> elements,
                                     @NotNull PsiFile containingFile,
                                     @NotNull List<LineMarkerProvider> providers,
                                     @NotNull PairConsumer<PsiElement, LineMarkerInfo> consumer) {
    ApplicationManager.getApplication().assertReadAccessAllowed();
    Set<PsiFile> visitedInjectedFiles = new THashSet<>();
    // Indexed loops avoid iterator allocation on this hot path.
    //noinspection ForLoopReplaceableByForEach
    for (int i = 0; i < elements.size(); i++) {
      PsiElement element = elements.get(i);
      //noinspection ForLoopReplaceableByForEach
      for (int j = 0; j < providers.size(); j++) {
        ProgressManager.checkCanceled();
        LineMarkerProvider provider = providers.get(j);
        LineMarkerInfo info;
        try {
          info = provider.getLineMarkerInfo(element);
        }
        catch (ProcessCanceledException | IndexNotReadyException e) {
          // Control-flow exceptions must propagate, never be logged.
          throw e;
        }
        catch (Exception e) {
          // A broken provider must not kill the whole pass; log and continue.
          LOG.error(e);
          continue;
        }
        if (info != null) {
          consumer.consume(element, info);
        }
      }
      queryLineMarkersForInjected(element, containingFile, visitedInjectedFiles, consumer);
    }

    // Slow path: providers that need to see all elements at once.
    List<LineMarkerInfo> slowLineMarkers = new NotNullList<>();
    //noinspection ForLoopReplaceableByForEach
    for (int j = 0; j < providers.size(); j++) {
      ProgressManager.checkCanceled();
      LineMarkerProvider provider = providers.get(j);
      try {
        provider.collectSlowLineMarkers(elements, slowLineMarkers);
      }
      catch (ProcessCanceledException | IndexNotReadyException e) {
        throw e;
      }
      catch (Exception e) {
        LOG.error(e);
        continue;
      }
      if (!slowLineMarkers.isEmpty()) {
        //noinspection ForLoopReplaceableByForEach
        for (int k = 0; k < slowLineMarkers.size(); k++) {
          LineMarkerInfo slowInfo = slowLineMarkers.get(k);
          PsiElement element = slowInfo.getElement();
          consumer.consume(element, slowInfo);
        }
        // Reuse the buffer for the next provider.
        slowLineMarkers.clear();
      }
    }
  }

  /**
   * Collects markers from language fragments injected into {@code element} and converts each
   * injected marker's range back into host-file coordinates before reporting it.
   */
  private static void queryLineMarkersForInjected(@NotNull PsiElement element,
                                                  @NotNull final PsiFile containingFile,
                                                  @NotNull Set<PsiFile> visitedInjectedFiles,
                                                  @NotNull final PairConsumer<PsiElement, LineMarkerInfo> consumer) {
    // Don't recurse from within an already-injected file.
    if (containingFile.getViewProvider() instanceof InjectedFileViewProvider) return;
    final InjectedLanguageManager manager = InjectedLanguageManager.getInstance(containingFile.getProject());
    InjectedLanguageUtil.enumerate(element, containingFile, false, (injectedPsi, places) -> {
      if (!visitedInjectedFiles.add(injectedPsi)) return; // there may be several concatenated literals making the one injected file
      final Project project = injectedPsi.getProject();
      Document document = PsiDocumentManager.getInstance(project).getCachedDocument(injectedPsi);
      if (!(document instanceof DocumentWindow)) return;
      List<PsiElement> injElements = CollectHighlightsUtil.getElementsInRange(injectedPsi, 0, injectedPsi.getTextLength());
      final List<LineMarkerProvider> providers = getMarkerProviders(injectedPsi.getLanguage(), project);
      queryProviders(injElements, injectedPsi, providers, (injectedElement, injectedMarker) -> {
        GutterIconRenderer gutterRenderer = injectedMarker.createGutterRenderer();
        TextRange injectedRange = new TextRange(injectedMarker.startOffset, injectedMarker.endOffset);
        List<TextRange> editables = manager.intersectWithAllEditableFragments(injectedPsi, injectedRange);
        for (TextRange editable : editables) {
          // Map the injected-file range onto the host document and re-create the marker there.
          TextRange hostRange = manager.injectedToHost(injectedPsi, editable);
          Icon icon = gutterRenderer == null ? null : gutterRenderer.getIcon();
          GutterIconNavigationHandler<PsiElement> navigationHandler = injectedMarker.getNavigationHandler();
          LineMarkerInfo<PsiElement> converted =
            new LineMarkerInfo<>(injectedElement, hostRange, icon, injectedMarker.updatePass,
                                 e -> injectedMarker.getLineMarkerTooltip(), navigationHandler,
                                 GutterIconRenderer.Alignment.RIGHT);
          consumer.consume(injectedElement, converted);
        }
      });
    });
  }

  /**
   * Synchronously computes all line markers for {@code file} (whole file range) without an editor.
   */
  @NotNull
  public static Collection<LineMarkerInfo> queryLineMarkers(@NotNull PsiFile file, @NotNull Document document) {
    if (file.getNode() == null) {
      // binary file? see IDEADEV-2809
      return Collections.emptyList();
    }
    LineMarkersPass pass = new LineMarkersPass(file.getProject(), file, document, file.getTextRange(), file.getTextRange());
    pass.doCollectInformation(new EmptyProgressIndicator());
    return pass.myMarkers;
  }

  /**
   * Creates an icon-less marker that draws a method-separator line above {@code startFrom},
   * using the separator color of the current global color scheme.
   */
  @NotNull
  public static LineMarkerInfo createMethodSeparatorLineMarker(@NotNull PsiElement startFrom, @NotNull EditorColorsManager colorsManager) {
    LineMarkerInfo info = new LineMarkerInfo<>(
      startFrom,
      startFrom.getTextRange(),
      null,
      Pass.LINE_MARKERS,
      FunctionUtil.<Object, String>nullConstant(),
      null,
      GutterIconRenderer.Alignment.RIGHT
    );
    EditorColorsScheme scheme = colorsManager.getGlobalScheme();
    info.separatorColor = scheme.getColor(CodeInsightColors.METHOD_SEPARATORS_COLOR);
    info.separatorPlacement = SeparatorPlacement.TOP;
    return info;
  }

  @Override
  public String toString() {
    return super.toString() + "; myBounds: " + myPriorityBounds;
  }
}
| |
package com.intellij.dupLocator.treeHash;
import com.intellij.dupLocator.*;
import com.intellij.dupLocator.equivalence.EquivalenceDescriptor;
import com.intellij.dupLocator.equivalence.EquivalenceDescriptorProvider;
import com.intellij.dupLocator.iterators.FilteringNodeIterator;
import com.intellij.dupLocator.iterators.NodeIterator;
import com.intellij.dupLocator.iterators.SiblingNodeIterator;
import com.intellij.dupLocator.util.DuplocatorUtil;
import com.intellij.dupLocator.util.NodeFilter;
import com.intellij.dupLocator.util.PsiFragment;
import com.intellij.psi.PsiElement;
import com.intellij.psi.impl.source.tree.LeafElement;
import com.intellij.psi.tree.IElementType;
import com.intellij.util.containers.HashMap;
import gnu.trove.TIntObjectHashMap;
import org.jetbrains.annotations.NotNull;
import java.util.*;
/**
 * Matching visitor used by the duplicate-code detector: decides whether two PSI subtrees are
 * equivalent, taking the active duplicates profile (role/literal distinctions) into account.
 * Tree hashes are memoized per element to avoid re-hashing shared subtrees.
 *
 * @author Eugene.Kudelevsky
 */
public class DuplicatesMatchingVisitor extends AbstractMatchingVisitor {
  private final NodeSpecificHasherBase myNodeSpecificHasher;
  private final NodeFilter myNodeFilter;
  // Subtrees whose hash cost is below this threshold are treated as trivially matching.
  private final int myDiscardCost;
  private final TreeHasherBase myTreeHasher;
  // Memoized hash/cost per element; see the TreeHasherBase override below.
  private final Map<PsiElement, TreeHashResult> myPsiElement2HashAndCost = new HashMap<>();

  public DuplicatesMatchingVisitor(NodeSpecificHasherBase nodeSpecificHasher,
                                   @NotNull NodeFilter nodeFilter,
                                   int discardCost) {
    myNodeSpecificHasher = nodeSpecificHasher;
    myNodeFilter = nodeFilter;
    myDiscardCost = discardCost;
    myTreeHasher = new TreeHasherBase(null, myNodeSpecificHasher.getDuplicatesProfile(), discardCost, false) {
      @Override
      protected TreeHashResult hash(@NotNull PsiElement root, PsiFragment upper, @NotNull NodeSpecificHasher hasher) {
        // Cache hashes: the same subtree is hashed many times during matching.
        TreeHashResult result = myPsiElement2HashAndCost.get(root);
        if (result == null) {
          result = super.hash(root, upper, hasher);
          myPsiElement2HashAndCost.put(root, result);
        }
        return result;
      }
    };
  }

  /**
   * Matches two sibling sequences position by position, skipping elements the profile considers
   * transparent. Both sequences must be exhausted together for a successful match.
   */
  @Override
  public boolean matchSequentially(NodeIterator nodes, NodeIterator nodes2) {
    while (true) {
      if (!nodes.hasNext() || !nodes2.hasNext()) {
        return !nodes.hasNext() && !nodes2.hasNext();
      }
      skipIfNecessary(nodes, nodes2);
      skipIfNecessary(nodes2, nodes);
      // Skipping may have exhausted either sequence; re-check before matching.
      if (!nodes.hasNext() || !nodes2.hasNext()) {
        return !nodes.hasNext() && !nodes2.hasNext();
      }
      if (!match(nodes.current(), nodes2.current())) {
        return false;
      }
      nodes.advance();
      nodes2.advance();
    }
  }

  /**
   * Advances {@code toSkip} past nodes that should be skipped relative to the current node of
   * {@code reference}. (Renamed from the misspelled "skipIfNeccessary"; private, so no external
   * callers are affected.)
   */
  private static void skipIfNecessary(NodeIterator reference, NodeIterator toSkip) {
    while (DuplocatorUtil.shouldSkip(toSkip.current(), reference.current())) {
      toSkip.advance();
    }
  }

  /**
   * Returns true when the two elements are considered duplicates of each other.
   * Cheap subtrees (cost below {@code myDiscardCost}) match unconditionally, as do elements
   * whose role the current duplocator state does not distinguish.
   */
  @Override
  public boolean match(PsiElement element1, PsiElement element2) {
    if (element1 == null || element2 == null) {
      return element1 == element2;
    }
    if (myDiscardCost > 0) {
      final int cost1 = myTreeHasher.hash(element1, null, myNodeSpecificHasher).getCost();
      final int cost2 = myTreeHasher.hash(element2, null, myNodeSpecificHasher).getCost();
      if (cost1 < myDiscardCost || cost2 < myDiscardCost) {
        return true;
      }
    }

    final DuplicatesProfileBase duplicatesProfile = myNodeSpecificHasher.getDuplicatesProfile();

    final PsiElementRole role1 = duplicatesProfile.getRole(element1);
    final PsiElementRole role2 = duplicatesProfile.getRole(element2);

    // Collect the roles the user's settings do not distinguish (e.g. variable names).
    final Set<PsiElementRole> skippedRoles = EnumSet.noneOf(PsiElementRole.class);
    final ExternalizableDuplocatorState duplocatorState =
      duplicatesProfile.getDuplocatorState(duplicatesProfile.getLanguage(element1));
    for (PsiElementRole role : PsiElementRole.values()) {
      if (!duplocatorState.distinguishRole(role)) {
        skippedRoles.add(role);
      }
    }

    if (role1 == role2 && skippedRoles.contains(role1)) {
      return true;
    }

    final EquivalenceDescriptorProvider descriptorProvider = EquivalenceDescriptorProvider.getInstance(element1);
    EquivalenceDescriptor descriptor1 = descriptorProvider != null ? descriptorProvider.buildDescriptor(element1) : null;
    EquivalenceDescriptor descriptor2 = descriptorProvider != null ? descriptorProvider.buildDescriptor(element2) : null;

    // Unwrap transparent nodes and retry if either element changed.
    PsiElement newElement1 = DuplocatorUtil.skipNodeIfNeccessary(element1, descriptor1, myNodeFilter);
    PsiElement newElement2 = DuplocatorUtil.skipNodeIfNeccessary(element2, descriptor2, myNodeFilter);
    if (newElement1 != element1 || newElement2 != element2) {
      return match(newElement1, newElement2);
    }

    if (!element1.getClass().equals(element2.getClass())) {
      return false;
    }

    if (descriptor1 != null && descriptor2 != null) {
      return DuplocatorUtil.match(descriptor1, descriptor2, this, skippedRoles, duplicatesProfile);
    }

    if (element1 instanceof LeafElement) {
      IElementType elementType1 = ((LeafElement)element1).getElementType();
      IElementType elementType2 = ((LeafElement)element2).getElementType();

      // Two literals match regardless of text when literal distinction is disabled.
      if (!duplocatorState.distinguishLiterals() &&
          duplicatesProfile.getLiterals().contains(elementType1) &&
          duplicatesProfile.getLiterals().contains(elementType2)) {
        return true;
      }
      return element1.getText().equals(element2.getText());
    }

    if (element1.getFirstChild() == null && element1.getTextLength() == 0) {
      return element2.getFirstChild() == null && element2.getTextLength() == 0;
    }

    return matchSequentially(new FilteringNodeIterator(new SiblingNodeIterator(element1.getFirstChild()), getNodeFilter()),
                             new FilteringNodeIterator(new SiblingNodeIterator(element2.getFirstChild()), getNodeFilter()));
  }

  /**
   * Matches the two element sequences ignoring order, by bucketing the first sequence's elements
   * by tree hash and consuming matching bucket entries for each element of the second sequence.
   */
  @Override
  protected boolean doMatchInAnyOrder(NodeIterator it1, NodeIterator it2) {
    final List<PsiElement> elements1 = new ArrayList<>();
    final List<PsiElement> elements2 = new ArrayList<>();

    while (it1.hasNext()) {
      final PsiElement element = it1.current();
      if (element != null) {
        elements1.add(element);
      }
      it1.advance();
    }

    while (it2.hasNext()) {
      final PsiElement element = it2.current();
      if (element != null) {
        elements2.add(element);
      }
      it2.advance();
    }

    if (elements1.size() != elements2.size()) {
      return false;
    }

    final TIntObjectHashMap<List<PsiElement>> hash2element = new TIntObjectHashMap<>(elements1.size());

    for (PsiElement element : elements1) {
      final TreeHashResult result = myTreeHasher.hash(element, null, myNodeSpecificHasher);
      if (result != null) {
        final int hash = result.getHash();
        List<PsiElement> list = hash2element.get(hash);
        if (list == null) {
          list = new ArrayList<>();
          hash2element.put(hash, list);
        }
        list.add(element);
      }
    }

    for (PsiElement element : elements2) {
      final TreeHashResult result = myTreeHasher.hash(element, null, myNodeSpecificHasher);
      if (result != null) {
        final int hash = result.getHash();
        final List<PsiElement> list = hash2element.get(hash);
        if (list == null) {
          return false;
        }

        boolean found = false;
        // NOTE(review): this removes EVERY candidate that matches 'element', not just the first.
        // If one element can match several candidates this consumes all of them, which may make
        // a later element2 fail to find a partner — confirm this is intentional before changing.
        for (Iterator<PsiElement> it = list.iterator(); it.hasNext();) {
          if (match(element, it.next())) {
            it.remove();
            found = true;
          }
        }

        if (!found) {
          return false;
        }

        if (list.isEmpty()) {
          hash2element.remove(hash);
        }
      }
    }

    return hash2element.size() == 0;
  }

  @NotNull
  @Override
  protected NodeFilter getNodeFilter() {
    return myNodeFilter;
  }
}
| |
// Copyright (c) 2012 Cloudera, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cloudera.impala.planner;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.fs.Path;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.cloudera.impala.authorization.AuthorizationConfig;
import com.cloudera.impala.analysis.ColumnLineageGraph;
import com.cloudera.impala.catalog.CatalogException;
import com.cloudera.impala.common.AnalysisException;
import com.cloudera.impala.common.ImpalaException;
import com.cloudera.impala.common.InternalException;
import com.cloudera.impala.common.NotImplementedException;
import com.cloudera.impala.common.RuntimeEnv;
import com.cloudera.impala.service.Frontend;
import com.cloudera.impala.testutil.ImpaladTestCatalog;
import com.cloudera.impala.testutil.TestFileParser;
import com.cloudera.impala.testutil.TestFileParser.Section;
import com.cloudera.impala.testutil.TestFileParser.TestCase;
import com.cloudera.impala.testutil.TestUtils;
import com.cloudera.impala.thrift.ImpalaInternalServiceConstants;
import com.cloudera.impala.thrift.TDescriptorTable;
import com.cloudera.impala.thrift.TExecRequest;
import com.cloudera.impala.thrift.TExplainLevel;
import com.cloudera.impala.thrift.THBaseKeyRange;
import com.cloudera.impala.thrift.THdfsFileSplit;
import com.cloudera.impala.thrift.THdfsPartition;
import com.cloudera.impala.thrift.THdfsScanNode;
import com.cloudera.impala.thrift.THdfsTable;
import com.cloudera.impala.thrift.TNetworkAddress;
import com.cloudera.impala.thrift.TPlanFragment;
import com.cloudera.impala.thrift.TPlanNode;
import com.cloudera.impala.thrift.TQueryCtx;
import com.cloudera.impala.thrift.TQueryExecRequest;
import com.cloudera.impala.thrift.TQueryOptions;
import com.cloudera.impala.thrift.TScanRangeLocations;
import com.cloudera.impala.thrift.TTableDescriptor;
import com.cloudera.impala.thrift.TTupleDescriptor;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
public class PlannerTestBase {
private final static Logger LOG = LoggerFactory.getLogger(PlannerTest.class);
private final static boolean GENERATE_OUTPUT_FILE = true;
private static Frontend frontend_ = new Frontend(
AuthorizationConfig.createAuthDisabledConfig(), new ImpaladTestCatalog());
private final String testDir_ = "functional-planner/queries/PlannerTest";
private final String outDir_ = "/tmp/PlannerTest/";
// Map from plan ID (TPlanNodeId) to the plan node with that ID.
private final Map<Integer, TPlanNode> planMap_ = Maps.newHashMap();
// Map from tuple ID (TTupleId) to the tuple descriptor with that ID.
private final Map<Integer, TTupleDescriptor> tupleMap_ = Maps.newHashMap();
// Map from table ID (TTableId) to the table descriptor with that ID.
private final Map<Integer, TTableDescriptor> tableMap_ = Maps.newHashMap();
/** Pins the runtime environment to deterministic values before any test in the class runs. */
@BeforeClass
public static void setUp() throws Exception {
    // Use 8 cores for resource estimation.
    RuntimeEnv.INSTANCE.setNumCores(8);
    // Set test env to control the explain level.
    RuntimeEnv.INSTANCE.setTestEnv(true);
}
/** Restores the runtime environment modified in {@code setUp()} after all tests finish. */
@AfterClass
public static void cleanUp() {
    RuntimeEnv.INSTANCE.reset();
}
/**
 * Rebuilds the plan-node, tuple-descriptor and table-descriptor lookup maps from the given
 * exec request, discarding whatever a previous request left behind. findPartition() relies on
 * these maps to resolve thrift metadata structures quickly.
 */
private void buildMaps(TQueryExecRequest execRequest) {
    planMap_.clear();
    tupleMap_.clear();
    tableMap_.clear();

    // Index every plan node by its node id.
    for (TPlanFragment fragment : execRequest.fragments) {
        for (TPlanNode planNode : fragment.plan.nodes) {
            planMap_.put(planNode.node_id, planNode);
        }
    }

    if (!execRequest.isSetDesc_tbl()) {
        return;
    }
    TDescriptorTable descriptorTable = execRequest.desc_tbl;
    for (TTupleDescriptor tuple : descriptorTable.tupleDescriptors) {
        tupleMap_.put(tuple.id, tuple);
    }
    if (descriptorTable.isSetTableDescriptors()) {
        for (TTableDescriptor table : descriptorTable.tableDescriptors) {
            tableMap_.put(table.id, table);
        }
    }
}
/**
 * Look up the partition corresponding to the plan node (identified by
 * nodeId) and a file split. Assumes buildMaps() was run for the request that produced the
 * split; the Preconditions checks fail fast when the maps are stale or inconsistent.
 */
private THdfsPartition findPartition(int nodeId, THdfsFileSplit split) {
    // Plan node -> its HDFS scan node.
    TPlanNode node = planMap_.get(nodeId);
    Preconditions.checkNotNull(node);
    Preconditions.checkState(node.node_id == nodeId && node.isSetHdfs_scan_node());
    THdfsScanNode scanNode = node.getHdfs_scan_node();
    int tupleId = scanNode.getTuple_id();
    // Scan node's tuple -> table descriptor.
    TTupleDescriptor tupleDesc = tupleMap_.get(tupleId);
    Preconditions.checkNotNull(tupleDesc);
    Preconditions.checkState(tupleDesc.id == tupleId);
    TTableDescriptor tableDesc = tableMap_.get(tupleDesc.tableId);
    Preconditions.checkNotNull(tableDesc);
    Preconditions.checkState(tableDesc.id == tupleDesc.tableId &&
        tableDesc.isSetHdfsTable());
    // Table -> the partition referenced by the split.
    THdfsTable hdfsTable = tableDesc.getHdfsTable();
    THdfsPartition partition = hdfsTable.getPartitions().get(split.partition_id);
    Preconditions.checkNotNull(partition);
    Preconditions.checkState(partition.id == split.partition_id);
    return partition;
}
/**
 * Verify that all THdfsPartitions included in the descriptor table are referenced by
 * at least one scan range or part of an inserted table. PrintScanRangeLocations
 * will implicitly verify the converse (it'll fail if a scan range references a
 * table/partition descriptor that is not present). Failures are appended to
 * {@code errorLog} rather than thrown, so the caller can aggregate them.
 */
private void testHdfsPartitionsReferenced(TQueryExecRequest execRequest,
    String query, StringBuilder errorLog) {
    long insertTableId = -1;
    // Collect all partitions that are referenced by a scan range.
    Set<THdfsPartition> scanRangePartitions = Sets.newHashSet();
    if (execRequest.per_node_scan_ranges != null) {
        for (Map.Entry<Integer, List<TScanRangeLocations>> entry:
            execRequest.per_node_scan_ranges.entrySet()) {
            if (entry.getValue() == null) {
                continue;
            }
            for (TScanRangeLocations locations: entry.getValue()) {
                if (locations.scan_range.isSetHdfs_file_split()) {
                    THdfsFileSplit split = locations.scan_range.getHdfs_file_split();
                    THdfsPartition partition = findPartition(entry.getKey(), split);
                    scanRangePartitions.add(partition);
                }
            }
        }
    }

    if (execRequest.isSetFinalize_params()) {
        insertTableId = execRequest.getFinalize_params().getTable_id();
    }

    // 'first' ensures the query text is printed only once per failing query.
    boolean first = true;
    // Iterate through all partitions of the descriptor table and verify all partitions
    // are referenced.
    if (execRequest.isSetDesc_tbl() && execRequest.desc_tbl.isSetTableDescriptors()) {
        for (TTableDescriptor tableDesc: execRequest.desc_tbl.tableDescriptors) {
            // All partitions of insertTableId are okay.
            if (tableDesc.getId() == insertTableId) continue;
            if (!tableDesc.isSetHdfsTable()) continue;
            THdfsTable hdfsTable = tableDesc.getHdfsTable();
            for (Map.Entry<Long, THdfsPartition> e :
                hdfsTable.getPartitions().entrySet()) {
                THdfsPartition partition = e.getValue();
                if (!scanRangePartitions.contains(partition)) {
                    if (first) errorLog.append("query:\n" + query + "\n");
                    errorLog.append(
                        "  unreferenced partition: HdfsTable: " + tableDesc.getId() +
                        " HdfsPartition: " + partition.getId() + "\n");
                    first = false;
                }
            }
        }
    }
}
/**
 * Construct a string representation of the scan ranges for this request.
 * One "NODE &lt;id&gt;:" section per plan node, with one line per HDFS split or HBase key range.
 *
 * <p>NOTE(review): the PascalCase method name violates Java naming conventions; kept as-is
 * because callers outside this view may reference it.</p>
 */
private StringBuilder PrintScanRangeLocations(TQueryExecRequest execRequest) {
    StringBuilder result = new StringBuilder();
    if (execRequest.per_node_scan_ranges == null) {
        return result;
    }
    for (Map.Entry<Integer, List<TScanRangeLocations>> entry:
        execRequest.per_node_scan_ranges.entrySet()) {
        result.append("NODE " + entry.getKey().toString() + ":\n");
        if (entry.getValue() == null) {
            continue;
        }

        for (TScanRangeLocations locations: entry.getValue()) {
            // print scan range
            result.append("  ");
            if (locations.scan_range.isSetHdfs_file_split()) {
                THdfsFileSplit split = locations.scan_range.getHdfs_file_split();
                THdfsPartition partition = findPartition(entry.getKey(), split);
                // Normalize environment-dependent path components before printing.
                Path filePath = new Path(partition.getLocation(), split.file_name);
                filePath = cleanseFilePath(filePath);
                result.append("HDFS SPLIT " + filePath.toString() + " "
                    + Long.toString(split.offset) + ":" + Long.toString(split.length));
            }
            if (locations.scan_range.isSetHbase_key_range()) {
                THBaseKeyRange keyRange = locations.scan_range.getHbase_key_range();
                Integer hostIdx = locations.locations.get(0).host_idx;
                TNetworkAddress networkAddress = execRequest.getHost_list().get(hostIdx);
                result.append("HBASE KEYRANGE ");
                result.append("port=" + networkAddress.port + " ");
                // Unset start/stop keys mean an unbounded range on that side.
                if (keyRange.isSetStartKey()) {
                    result.append(HBaseScanNode.printKey(keyRange.getStartKey().getBytes()));
                } else {
                    result.append("<unbounded>");
                }
                result.append(":");
                if (keyRange.isSetStopKey()) {
                    result.append(HBaseScanNode.printKey(keyRange.getStopKey().getBytes()));
                } else {
                    result.append("<unbounded>");
                }
            }
            result.append("\n");
        }
    }
    return result;
}
/**
 * Normalize components of the given file path, removing any environment- or test-run
 * dependent components: the 16+16-hex-char unique id portion of Impala-generated file names
 * is replaced with the fixed literal "&lt;UID&gt;". Subclasses should override to do
 * filesystem specific cleansing.
 *
 * <p>NOTE(review): the Pattern could be cached in a static final field to avoid recompiling
 * it on every call.</p>
 */
protected Path cleanseFilePath(Path path) {
    Matcher matcher = Pattern.compile("\\w{16}-\\w{16}_\\d+_data").matcher(path.getName());
    return new Path(path.getParent(), matcher.replaceFirst("<UID>_data"));
}
/**
 * Extracts and returns the expected error message from expectedPlan.
 * Returns null if expectedPlan is empty or its first element is not an error message.
 * The accepted format for error messages is 'not implemented: expected error message'.
 * Returns the empty string if expectedPlan starts with 'not implemented' but no
 * expected error message was given.
 */
private String getExpectedErrorMessage(ArrayList<String> expectedPlan) {
    if (expectedPlan.isEmpty()) return null;
    if (!expectedPlan.get(0).toLowerCase().startsWith("not implemented")) return null;
    // Find first ':' and extract string on right hand side as error message.
    // (Was 'ix + 1 > 0' — an obfuscated way of testing that ':' was found.)
    int ix = expectedPlan.get(0).indexOf(":");
    if (ix >= 0) {
        return expectedPlan.get(0).substring(ix + 1).trim();
    } else {
        return "";
    }
}
/**
 * Records a "not implemented" planning failure. When no error was expected
 * ({@code expectedErrorMsg == null}) the failure itself is logged as an error; otherwise the
 * actual exception message is compared (case-insensitively) against the expected one and any
 * mismatch is logged. The actual message is always appended to {@code actualOutput}.
 */
private void handleNotImplException(String query, String expectedErrorMsg,
    StringBuilder errorLog, StringBuilder actualOutput, Throwable e) {
    actualOutput.append("not implemented: " + e.getMessage() + "\n");
    if (expectedErrorMsg == null) {
        // The test expected a plan, but planning failed.
        errorLog.append("query:\n" + query + "\nPLAN not implemented: "
            + e.getMessage() + "\n");
    } else if (!expectedErrorMsg.isEmpty()) {
        // Compare actual and expected error messages. (The original re-checked
        // expectedErrorMsg != null here, which is always true in this branch,
        // and compared via toLowerCase().equals(); equalsIgnoreCase is the idiom.)
        if (!e.getMessage().equalsIgnoreCase(expectedErrorMsg)) {
            errorLog.append("query:\n" + query + "\nExpected error message: '"
                + expectedErrorMsg + "'\nActual error message: '"
                + e.getMessage() + "'\n");
        }
    }
}
/**
 * Merge the options of b into a and return a.
 *
 * <p>Mutates {@code a} in place: every field explicitly set on {@code b} overwrites the
 * corresponding field of {@code a}; unset fields of {@code b} leave {@code a} untouched.</p>
 */
private TQueryOptions mergeQueryOptions(TQueryOptions a, TQueryOptions b) {
    for(TQueryOptions._Fields f : TQueryOptions._Fields.values()) {
        if (b.isSet(f)) {
            a.setFieldValue(f, b.getFieldValue(f));
        }
    }
    return a;
}
/**
 * Returns the baseline query options used by every planner test: standard explain level,
 * unsupported file formats allowed, and single-node execution threshold disabled.
 */
private TQueryOptions defaultQueryOptions() {
    TQueryOptions options = new TQueryOptions();
    options.setExplain_level(TExplainLevel.STANDARD);
    options.setAllow_unsupported_formats(true);
    options.setExec_single_node_rows_threshold(0);
    return options;
}
/**
 * Produces single-node and distributed plans for testCase and compares
 * plan and scan range results.
 * Appends the actual single-node and distributed plan as well as the printed
 * scan ranges to actualOutput, along with the requisite section header.
 * locations to actualScanRangeLocations; compares both to the appropriate sections
 * of 'testCase'. Explicit {@code options} are merged over the defaults.
 *
 * <p>NOTE(review): PascalCase method name violates Java conventions; kept for callers
 * outside this view.</p>
 */
private void RunTestCase(TestCase testCase, StringBuilder errorLog,
    StringBuilder actualOutput, String dbName, TQueryOptions options)
    throws CatalogException {
    if (options == null) {
        options = defaultQueryOptions();
    } else {
        // User-supplied options take precedence over defaults.
        options = mergeQueryOptions(defaultQueryOptions(), options);
    }

    String query = testCase.getQuery();
    LOG.info("running query " + query);
    if (query.isEmpty()) {
        throw new IllegalStateException("Cannot plan empty query in line: " +
            testCase.getStartingLineNum());
    }
    TQueryCtx queryCtx = TestUtils.createQueryContext(
        dbName, System.getProperty("user.name"));
    queryCtx.request.query_options = options;
    // single-node plan and scan range locations
    testSingleNodePlan(testCase, queryCtx, errorLog, actualOutput);
    testDistributedPlan(testCase, queryCtx, errorLog, actualOutput);
    testColumnLineageOutput(testCase, queryCtx, errorLog, actualOutput);
}
/**
 * Produces a single-node plan for testCase and compares the actual plan with the
 * expected plan, as well as the scan range locations.
 * If testCase contains no expected single-node plan then this function is a no-op.
 */
private void testSingleNodePlan(TestCase testCase, TQueryCtx queryCtx,
    StringBuilder errorLog, StringBuilder actualOutput) throws CatalogException {
  ArrayList<String> expectedPlan = testCase.getSectionContents(Section.PLAN);
  // Test case has no expected single-node plan. Do not test it.
  if (expectedPlan == null || expectedPlan.isEmpty()) return;
  String query = testCase.getQuery();
  String expectedErrorMsg = getExpectedErrorMessage(expectedPlan);
  queryCtx.request.getQuery_options().setNum_nodes(1);
  queryCtx.request.setStmt(query);
  // An expected error message means the test expects planning to fail.
  boolean isImplemented = expectedErrorMsg == null;
  StringBuilder explainBuilder = new StringBuilder();
  TExecRequest execRequest = null;
  String locationsStr = null;
  actualOutput.append(Section.PLAN.getHeader() + "\n");
  try {
    execRequest = frontend_.createExecRequest(queryCtx, explainBuilder);
    buildMaps(execRequest.query_exec_request);
    // Strip the resource-estimate header; it changes too easily with stats.
    String explainStr = removeExplainHeader(explainBuilder.toString());
    actualOutput.append(explainStr);
    if (!isImplemented) {
      errorLog.append(
          "query produced PLAN\nquery=" + query + "\nplan=\n" + explainStr);
    } else {
      LOG.info("single-node plan: " + explainStr);
      String result = TestUtils.compareOutput(
          Lists.newArrayList(explainStr.split("\n")), expectedPlan, true);
      if (!result.isEmpty()) {
        errorLog.append("section " + Section.PLAN.toString() + " of query:\n" + query
            + "\n" + result);
      }
      // Query exec request may not be set for DDL, e.g., CTAS.
      if (execRequest.isSetQuery_exec_request()) {
        testHdfsPartitionsReferenced(execRequest.query_exec_request, query, errorLog);
        locationsStr =
            PrintScanRangeLocations(execRequest.query_exec_request).toString();
      }
    }
  } catch (ImpalaException e) {
    if (e instanceof AnalysisException) {
      e.printStackTrace();
      errorLog.append(
          "query:\n" + query + "\nanalysis error: " + e.getMessage() + "\n");
      return;
    } else if (e instanceof InternalException) {
      errorLog.append(
          "query:\n" + query + "\ninternal error: " + e.getMessage() + "\n");
      return;
    } else if (e instanceof NotImplementedException) {
      // FIX: was "} if (...)" — a dangling statement rather than an else-if branch.
      // Behavior happened to match because the branches above return, but the
      // chain is now explicit.
      handleNotImplException(query, expectedErrorMsg, errorLog, actualOutput, e);
    } else if (e instanceof CatalogException) {
      // TODO: do we need to rethrow?
      throw (CatalogException) e;
    } else {
      errorLog.append(
          "query:\n" + query + "\nunhandled exception: " + e.getMessage() + "\n");
    }
  }
  // compare scan range locations
  LOG.info("scan range locations: " + locationsStr);
  ArrayList<String> expectedLocations =
      testCase.getSectionContents(Section.SCANRANGELOCATIONS);
  // FIX: guard against a null section; getSectionContents can return null
  // (see the PLAN/LINEAGE null checks above and in testColumnLineageOutput).
  if (expectedLocations != null && expectedLocations.size() > 0
      && locationsStr != null) {
    // Locations' order does not matter.
    String result = TestUtils.compareOutput(
        Lists.newArrayList(locationsStr.split("\n")), expectedLocations, false);
    if (!result.isEmpty()) {
      errorLog.append("section " + Section.SCANRANGELOCATIONS + " of query:\n"
          + query + "\n" + result);
    }
    actualOutput.append(Section.SCANRANGELOCATIONS.getHeader() + "\n");
    // Print the locations out sorted since the order is random and messed up
    // the diffs. The values in locationStr contains "Node X" labels as well
    // as paths.
    ArrayList<String> locations = Lists.newArrayList(locationsStr.split("\n"));
    ArrayList<String> perNodeLocations = Lists.newArrayList();
    for (int i = 0; i < locations.size(); ++i) {
      if (locations.get(i).startsWith("NODE")) {
        // Flush the previous node's locations, sorted, before its header.
        if (!perNodeLocations.isEmpty()) {
          Collections.sort(perNodeLocations);
          actualOutput.append(Joiner.on("\n").join(perNodeLocations)).append("\n");
          perNodeLocations.clear();
        }
        actualOutput.append(locations.get(i)).append("\n");
      } else {
        perNodeLocations.add(locations.get(i));
      }
    }
    // Flush the final node's locations.
    if (!perNodeLocations.isEmpty()) {
      Collections.sort(perNodeLocations);
      actualOutput.append(Joiner.on("\n").join(perNodeLocations)).append("\n");
    }
    // TODO: check that scan range locations are identical in both cases
  }
}
/**
 * Produces the column lineage graph for testCase and compares it with the expected
 * lineage from the LINEAGE section. If testCase contains no expected lineage then
 * this function is a no-op. Appends the pretty-printed actual lineage, with its
 * section header, to actualOutput; mismatches go into errorLog.
 */
private void testColumnLineageOutput(TestCase testCase, TQueryCtx queryCtx,
    StringBuilder errorLog, StringBuilder actualOutput) throws CatalogException {
  ArrayList<String> expectedLineage = testCase.getSectionContents(Section.LINEAGE);
  if (expectedLineage == null || expectedLineage.isEmpty()) return;
  String query = testCase.getQuery();
  queryCtx.request.getQuery_options().setNum_nodes(1);
  queryCtx.request.setStmt(query);
  StringBuilder explainBuilder = new StringBuilder();
  TExecRequest execRequest = null;
  String lineageGraph = null;
  try {
    execRequest = frontend_.createExecRequest(queryCtx, explainBuilder);
    // DML/queries carry the lineage on the exec request; DDL on the catalog op.
    if (execRequest.isSetQuery_exec_request()) {
      lineageGraph = execRequest.query_exec_request.lineage_graph;
    } else if (execRequest.isSetCatalog_op_request()) {
      lineageGraph = execRequest.catalog_op_request.lineage_graph;
    }
  } catch (ImpalaException e) {
    if (e instanceof AnalysisException) {
      e.printStackTrace();
      errorLog.append(
          "query:\n" + query + "\nanalysis error: " + e.getMessage() + "\n");
      return;
    } else if (e instanceof InternalException) {
      errorLog.append(
          "query:\n" + query + "\ninternal error: " + e.getMessage() + "\n");
      return;
    } else if (e instanceof NotImplementedException) {
      // FIX: was "} if (...)" — a dangling statement rather than an else-if branch.
      handleNotImplException(query, "", errorLog, actualOutput, e);
    } else if (e instanceof CatalogException) {
      throw (CatalogException) e;
    } else {
      errorLog.append(
          "query:\n" + query + "\nunhandled exception: " + e.getMessage() + "\n");
    }
  }
  LOG.info("lineage graph: " + lineageGraph);
  // FIX: reuse expectedLineage instead of re-fetching the LINEAGE section; it is
  // guaranteed non-empty here by the early return above.
  if (lineageGraph != null) {
    String serializedGraph = Joiner.on("\n").join(expectedLineage);
    ColumnLineageGraph expectedGraph =
        ColumnLineageGraph.createFromJSON(serializedGraph);
    ColumnLineageGraph outputGraph =
        ColumnLineageGraph.createFromJSON(lineageGraph);
    // Graph equality is structural; a null parse on either side is a failure.
    if (expectedGraph == null || outputGraph == null ||
        !outputGraph.equals(expectedGraph)) {
      StringBuilder lineageError = new StringBuilder();
      lineageError.append("section " + Section.LINEAGE + " of query:\n"
          + query + "\n");
      lineageError.append("Output:\n");
      lineageError.append(lineageGraph + "\n");
      lineageError.append("Expected:\n");
      lineageError.append(serializedGraph + "\n");
      errorLog.append(lineageError.toString());
    }
    actualOutput.append(Section.LINEAGE.getHeader());
    actualOutput.append(TestUtils.prettyPrintJson(lineageGraph) + "\n");
  }
}
/**
 * Produces distributed plan for testCase and compares actual plan with expected plan.
 * If testCase contains no expected distributed plan then this function is a no-op.
 */
private void testDistributedPlan(TestCase testCase, TQueryCtx queryCtx,
    StringBuilder errorLog, StringBuilder actualOutput) throws CatalogException {
  ArrayList<String> expectedPlan =
      testCase.getSectionContents(Section.DISTRIBUTEDPLAN);
  // Test case has no expected distributed plan. Do not test it.
  if (expectedPlan == null || expectedPlan.isEmpty()) return;
  String query = testCase.getQuery();
  String expectedErrorMsg = getExpectedErrorMessage(expectedPlan);
  // Plan across all nodes rather than single-node.
  queryCtx.request.getQuery_options().setNum_nodes(
      ImpalaInternalServiceConstants.NUM_NODES_ALL);
  queryCtx.request.setStmt(query);
  // An expected error message means the test expects planning to fail.
  boolean isImplemented = expectedErrorMsg == null;
  StringBuilder explainBuilder = new StringBuilder();
  actualOutput.append(Section.DISTRIBUTEDPLAN.getHeader() + "\n");
  TExecRequest execRequest = null;
  try {
    // distributed plan
    execRequest = frontend_.createExecRequest(queryCtx, explainBuilder);
    String explainStr = removeExplainHeader(explainBuilder.toString());
    actualOutput.append(explainStr);
    if (!isImplemented) {
      errorLog.append(
          "query produced DISTRIBUTEDPLAN\nquery=" + query + "\nplan=\n"
          + explainStr);
    } else {
      LOG.info("distributed plan: " + explainStr);
      String result = TestUtils.compareOutput(
          Lists.newArrayList(explainStr.split("\n")), expectedPlan, true);
      if (!result.isEmpty()) {
        errorLog.append("section " + Section.DISTRIBUTEDPLAN.toString()
            + " of query:\n" + query + "\n" + result);
      }
    }
  } catch (ImpalaException e) {
    if (e instanceof AnalysisException) {
      errorLog.append(
          "query:\n" + query + "\nanalysis error: " + e.getMessage() + "\n");
      return;
    } else if (e instanceof InternalException) {
      errorLog.append(
          "query:\n" + query + "\ninternal error: " + e.getMessage() + "\n");
      return;
    } else if (e instanceof NotImplementedException) {
      // FIX: was "} if (...)" — a dangling statement rather than an else-if branch.
      handleNotImplException(query, expectedErrorMsg, errorLog, actualOutput, e);
    } else if (e instanceof CatalogException) {
      throw (CatalogException) e;
    } else {
      errorLog.append(
          "query:\n" + query + "\nunhandled exception: " + e.getMessage() + "\n");
    }
  } catch (IllegalStateException ie) {
    errorLog.append(
        "query:\n" + query + "\nunhandled exception: " + ie.getMessage() + "\n");
  }
}
/**
 * Strips out the header containing resource estimates and the warning about missing
 * stats from the given explain plan, because the estimates can change easily with
 * stats/cardinality. The header ends at the first empty line; everything after it
 * is returned with a trailing newline. Returns the input unchanged if no empty
 * line is found.
 */
private String removeExplainHeader(String explain) {
  String[] lines = explain.split("\n");
  // Find the first empty line - the end of the header. The last line is excluded
  // so a trailing blank line cannot yield an empty plan body.
  for (int i = 0; i < lines.length - 1; ++i) {
    if (lines[i].isEmpty()) {
      // FIX: use stdlib String.join instead of Guava's Joiner (Arrays is
      // already in use here); also drops the stray space before the comma.
      return String.join("\n", Arrays.copyOfRange(lines, i + 1, lines.length))
          + "\n";
    }
  }
  return explain;
}
/**
 * Runs all test cases in testFile against the "default" database with the given
 * query options layered on top of the defaults.
 */
protected void runPlannerTestFile(String testFile, TQueryOptions options) {
  runPlannerTestFile(testFile, "default", options);
}
/**
 * Parses testDir_/testFile.test and runs every test case in it against dbName,
 * applying options on top of the default query options. All mismatches are
 * accumulated in an error log so every case runs; the test fails at the end if
 * the log is non-empty. When GENERATE_OUTPUT_FILE is set, the actual output of
 * all cases is written to outDir_ as testFile.test.
 */
private void runPlannerTestFile(String testFile, String dbName, TQueryOptions options) {
  String fileName = testDir_ + "/" + testFile + ".test";
  TestFileParser queryFileParser = new TestFileParser(fileName);
  StringBuilder actualOutput = new StringBuilder();
  queryFileParser.parseFile();
  StringBuilder errorLog = new StringBuilder();
  for (TestCase testCase : queryFileParser.getTestCases()) {
    actualOutput.append(testCase.getSectionAsString(Section.QUERY, true, "\n"));
    actualOutput.append("\n");
    try {
      RunTestCase(testCase, errorLog, actualOutput, dbName, options);
    } catch (CatalogException e) {
      errorLog.append(String.format("Failed to plan query\n%s\n%s",
          testCase.getQuery(), e.getMessage()));
    }
    actualOutput.append("====\n");
  }
  // Create the actual output file
  if (GENERATE_OUTPUT_FILE) {
    try {
      File outDirFile = new File(outDir_);
      outDirFile.mkdirs();
      // FIX: try-with-resources — the original leaked the FileWriter when
      // write() threw, since close() was not in a finally block.
      // NOTE(review): assumes outDir_ ends with a path separator — confirm.
      try (FileWriter fw = new FileWriter(outDir_ + testFile + ".test")) {
        fw.write(actualOutput.toString());
      }
    } catch (IOException e) {
      errorLog.append("Unable to create output file: " + e.getMessage());
    }
  }
  if (errorLog.length() != 0) {
    fail(errorLog.toString());
  }
}
/**
 * Runs all test cases in testFile against the "default" database with the
 * default query options.
 */
protected void runPlannerTestFile(String testFile) {
  runPlannerTestFile(testFile, "default", null);
}
/**
 * Runs all test cases in testFile against the given database with the default
 * query options.
 */
protected void runPlannerTestFile(String testFile, String dbName) {
  runPlannerTestFile(testFile, dbName, null);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.JOB_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._EVEN;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._ODD;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
import java.util.Date;
import java.util.List;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
import org.apache.hadoop.mapreduce.v2.util.MRWebAppUtil;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.ResponseInfo;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.InfoBlock;
import com.google.inject.Inject;
/**
 * Render a block of HTML for a given job: an overview info block, the
 * ApplicationMaster attempts table, and per-type task/attempt summary tables.
 */
public class HsJobBlock extends HtmlBlock {
  final AppContext appContext;

  @Inject HsJobBlock(AppContext appctx) {
    appContext = appctx;
  }

  /*
   * (non-Javadoc)
   * @see org.apache.hadoop.yarn.webapp.view.HtmlBlock#render(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block)
   */
  @Override protected void render(Block html) {
    String jid = $(JOB_ID);
    if (jid.isEmpty()) {
      html.
        p()._("Sorry, can't do anything without a JobID.")._();
      return;
    }
    JobId jobID = MRApps.toJobID(jid);
    Job j = appContext.getJob(jobID);
    if (j == null) {
      html.
        p()._("Sorry, ", jid, " not found.")._();
      return;
    }
    // NOTE(review): amInfos is dereferenced below (size(), iteration) without a
    // null check — confirm Job.getAMInfos() never returns null for history jobs.
    List<AMInfo> amInfos = j.getAMInfos();
    JobInfo job = new JobInfo(j);
    ResponseInfo infoBlock = info("Job Overview").
      _("Job Name:", job.getName()).
      _("User Name:", job.getUserName()).
      _("Queue:", job.getQueueName()).
      _("State:", job.getState()).
      _("Uberized:", job.isUber()).
      _("Submitted:", new Date(job.getSubmitTime())).
      _("Started:", new Date(job.getStartTime())).
      _("Finished:", new Date(job.getFinishTime())).
      _("Elapsed:", StringUtils.formatTime(
          Times.elapsed(job.getStartTime(), job.getFinishTime(), false)));
    String amString =
        amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
    // todo - switch to use JobInfo
    List<String> diagnostics = j.getDiagnostics();
    if(diagnostics != null && !diagnostics.isEmpty()) {
      StringBuffer b = new StringBuffer();
      for(String diag: diagnostics) {
        // Make task IDs embedded in diagnostics clickable.
        b.append(addTaskLinks(diag));
      }
      infoBlock._("Diagnostics:", b.toString());
    }
    // Timing averages are only meaningful when the job had tasks of that type.
    if(job.getNumMaps() > 0) {
      infoBlock._("Average Map Time", StringUtils.formatTime(job.getAvgMapTime()));
    }
    if(job.getNumReduces() > 0) {
      infoBlock._("Average Shuffle Time", StringUtils.formatTime(job.getAvgShuffleTime()));
      infoBlock._("Average Merge Time", StringUtils.formatTime(job.getAvgMergeTime()));
      infoBlock._("Average Reduce Time", StringUtils.formatTime(job.getAvgReduceTime()));
    }
    for (ConfEntryInfo entry : job.getAcls()) {
      infoBlock._("ACL "+entry.getName()+":", entry.getValue());
    }
    DIV<Hamlet> div = html.
      _(InfoBlock.class).
      div(_INFO_WRAP);
    // MRAppMasters Table
    TABLE<DIV<Hamlet>> table = div.table("#job");
    table.
      tr().
        th(amString).
      _().
      tr().
        th(_TH, "Attempt Number").
        th(_TH, "Start Time").
        th(_TH, "Node").
        th(_TH, "Logs").
      _();
    // Alternate row shading for readability.
    boolean odd = false;
    for (AMInfo amInfo : amInfos) {
      AMAttemptInfo attempt = new AMAttemptInfo(amInfo,
          job.getId(), job.getUserName(), "", "");
      table.tr((odd = !odd) ? _ODD : _EVEN).
        td(String.valueOf(attempt.getAttemptId())).
        td(new Date(attempt.getStartTime()).toString()).
        td().a(".nodelink", url(MRWebAppUtil.getYARNWebappScheme(),
            attempt.getNodeHttpAddress()),
            attempt.getNodeHttpAddress())._().
        td().a(".logslink", url(attempt.getShortLogsLink()),
            "logs")._().
      _();
    }
    table._();
    div._();

    html.div(_INFO_WRAP).
      // Tasks table
      table("#job").
        tr().
          th(_TH, "Task Type").
          th(_TH, "Total").
          th(_TH, "Complete")._().
        tr(_ODD).
          th().
            a(url("tasks", jid, "m"), "Map")._().
          // FIX: removed redundant nested String.valueOf(String.valueOf(...)).
          td(String.valueOf(job.getMapsTotal())).
          td(String.valueOf(job.getMapsCompleted()))._().
        tr(_EVEN).
          th().
            a(url("tasks", jid, "r"), "Reduce")._().
          td(String.valueOf(job.getReducesTotal())).
          td(String.valueOf(job.getReducesCompleted()))._()
      ._().

      // Attempts table
      table("#job").
        tr().
          th(_TH, "Attempt Type").
          th(_TH, "Failed").
          th(_TH, "Killed").
          th(_TH, "Successful")._().
        tr(_ODD).
          th("Maps").
          td().a(url("attempts", jid, "m",
              TaskAttemptStateUI.FAILED.toString()),
              String.valueOf(job.getFailedMapAttempts()))._().
          td().a(url("attempts", jid, "m",
              TaskAttemptStateUI.KILLED.toString()),
              String.valueOf(job.getKilledMapAttempts()))._().
          td().a(url("attempts", jid, "m",
              TaskAttemptStateUI.SUCCESSFUL.toString()),
              String.valueOf(job.getSuccessfulMapAttempts()))._().
        _().
        tr(_EVEN).
          th("Reduces").
          td().a(url("attempts", jid, "r",
              TaskAttemptStateUI.FAILED.toString()),
              String.valueOf(job.getFailedReduceAttempts()))._().
          td().a(url("attempts", jid, "r",
              TaskAttemptStateUI.KILLED.toString()),
              String.valueOf(job.getKilledReduceAttempts()))._().
          td().a(url("attempts", jid, "r",
              TaskAttemptStateUI.SUCCESSFUL.toString()),
              String.valueOf(job.getSuccessfulReduceAttempts()))._().
        _().
      _().
    _();
  }

  /**
   * Wraps every task ID appearing in text in an HTML link to that task's
   * job history page.
   */
  static String addTaskLinks(String text) {
    return TaskID.taskIdPattern.matcher(text).replaceAll(
        "<a href=\"/jobhistory/task/$0\">$0</a>");
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.