repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
ThePlexianNetwork/Grumy
lib/lwjgl/jar/src/templates/org/lwjgl/opengl/EXT_direct_state_access.java
71441
/* * Copyright (c) 2002-2008 LWJGL Project * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'LWJGL' nor the names of * its contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.lwjgl.opengl; import org.lwjgl.util.generator.*; import org.lwjgl.util.generator.opengl.*; import java.nio.*; @ForceInit @Dependent @DeprecatedGL public interface EXT_direct_state_access { /** * Accepted by the &lt;pname&gt; parameter of GetBooleanIndexedvEXT, * GetIntegerIndexedvEXT, GetFloatIndexedvEXT, GetDoubleIndexedvEXT: * GetBooleani_v, GetIntegeri_v, GetFloati_vEXT, GetDoublei_vEXT: */ int GL_PROGRAM_MATRIX_EXT = 0x8E2D; int GL_TRANSPOSE_PROGRAM_MATRIX_EXT = 0x8E2E; int GL_PROGRAM_MATRIX_STACK_DEPTH_EXT = 0x8E2F; /* OpenGL 1.1: New client commands */ @DeprecatedGL void glClientAttribDefaultEXT(@GLbitfield int mask); @DeprecatedGL void glPushClientAttribDefaultEXT(@GLbitfield int mask); /* OpenGL 1.0: New matrix commands add "Matrix" prefix to name, drops "Matrix" suffix from name, and add initial "enum matrixMode" parameter */ @StripPostfix("m") @DeprecatedGL void glMatrixLoadfEXT(@GLenum int matrixMode, @Check("16") @Const FloatBuffer m); @StripPostfix("m") @DeprecatedGL void glMatrixLoaddEXT(@GLenum int matrixMode, @Check("16") @Const DoubleBuffer m); @StripPostfix("m") @DeprecatedGL void glMatrixMultfEXT(@GLenum int matrixMode, @Check("16") @Const FloatBuffer m); @StripPostfix("m") @DeprecatedGL void glMatrixMultdEXT(@GLenum int matrixMode, @Check("16") @Const DoubleBuffer m); @DeprecatedGL void glMatrixLoadIdentityEXT(@GLenum int matrixMode); @DeprecatedGL void glMatrixRotatefEXT(@GLenum int matrixMode, float angle, float x, float y, float z); @DeprecatedGL void glMatrixRotatedEXT(@GLenum int matrixMode, double angle, double x, double y, double z); @DeprecatedGL void glMatrixScalefEXT(@GLenum int matrixMode, float x, float y, float z); @DeprecatedGL void glMatrixScaledEXT(@GLenum int matrixMode, double x, double y, double z); @DeprecatedGL void glMatrixTranslatefEXT(@GLenum int matrixMode, float x, float y, float z); @DeprecatedGL void glMatrixTranslatedEXT(@GLenum int matrixMode, double x, double y, double z); @DeprecatedGL void 
glMatrixOrthoEXT(@GLenum int matrixMode, double l, double r, double b, double t, double n, double f); @DeprecatedGL void glMatrixFrustumEXT(@GLenum int matrixMode, double l, double r, double b, double t, double n, double f); @DeprecatedGL void glMatrixPushEXT(@GLenum int matrixMode); @DeprecatedGL void glMatrixPopEXT(@GLenum int matrixMode); /* OpenGL 1.1: New texture object commands and queries replace "Tex" in name with "Texture" and add initial "uint texture" parameter */ void glTextureParameteriEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, int param); @StripPostfix("param") void glTextureParameterivEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, @Check("4") @Const IntBuffer param); void glTextureParameterfEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, float param); @StripPostfix("param") void glTextureParameterfvEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, @Check("4") @Const FloatBuffer param); void glTextureImage1DEXT(@GLuint int texture, @GLenum int target, int level, int internalformat, @GLsizei int width, int border, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check(value = "GLChecks.calculateTexImage1DStorage(pixels, format, type, width)", canBeNull = true) @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); void glTextureImage2DEXT(@GLuint int texture, @GLenum int target, int level, int internalformat, @GLsizei int width, @GLsizei int height, int border, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check(value = "GLChecks.calculateTexImage2DStorage(pixels, format, type, width, height)", canBeNull = true) @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); void glTextureSubImage1DEXT(@GLuint int texture, @GLenum int target, int level, int xoffset, @GLsizei int width, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check("GLChecks.calculateImageStorage(pixels, format, type, 
width, 1, 1)") @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); void glTextureSubImage2DEXT(@GLuint int texture, @GLenum int target, int level, int xoffset, int yoffset, @GLsizei int width, @GLsizei int height, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check("GLChecks.calculateImageStorage(pixels, format, type, width, height, 1)") @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); void glCopyTextureImage1DEXT(@GLuint int texture, @GLenum int target, int level, @GLenum int internalformat, int x, int y, @GLsizei int width, int border); void glCopyTextureImage2DEXT(@GLuint int texture, @GLenum int target, int level, @GLenum int internalformat, int x, int y, @GLsizei int width, @GLsizei int height, int border); void glCopyTextureSubImage1DEXT(@GLuint int texture, @GLenum int target, int level, int xoffset, int x, int y, @GLsizei int width); void glCopyTextureSubImage2DEXT(@GLuint int texture, @GLenum int target, int level, int xoffset, int yoffset, int x, int y, @GLsizei int width, @GLsizei int height); void glGetTextureImageEXT(@GLuint int texture, @GLenum int target, int level, @GLenum int format, @GLenum int type, @OutParameter @BufferObject(BufferKind.PackPBO) @Check("GLChecks.calculateImageStorage(pixels, format, type, 1, 1, 1)") @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); @StripPostfix("params") void glGetTextureParameterfvEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, @OutParameter @Check("4") FloatBuffer params); @Alternate("glGetTextureParameterfvEXT") @GLreturn("params") @StripPostfix(value = "params", hasPostfix = false) void glGetTextureParameterfvEXT2(@GLuint int texture, @GLenum int target, @GLenum int pname, @OutParameter FloatBuffer params); @StripPostfix("params") void glGetTextureParameterivEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, @OutParameter @Check("4") IntBuffer params); @Alternate("glGetTextureParameterivEXT") 
@GLreturn("params") @StripPostfix(value = "params", hasPostfix = false) void glGetTextureParameterivEXT2(@GLuint int texture, @GLenum int target, @GLenum int pname, @OutParameter IntBuffer params); @StripPostfix("params") void glGetTextureLevelParameterfvEXT(@GLuint int texture, @GLenum int target, int level, @GLenum int pname, @OutParameter @Check("4") FloatBuffer params); @Alternate("glGetTextureLevelParameterfvEXT") @GLreturn("params") @StripPostfix(value = "params", hasPostfix = false) void glGetTextureLevelParameterfvEXT2(@GLuint int texture, @GLenum int target, int level, @GLenum int pname, @OutParameter FloatBuffer params); @StripPostfix("params") void glGetTextureLevelParameterivEXT(@GLuint int texture, @GLenum int target, int level, @GLenum int pname, @OutParameter @Check("4") IntBuffer params); @Alternate("glGetTextureLevelParameterivEXT") @GLreturn("params") @StripPostfix(value = "params", hasPostfix = false) void glGetTextureLevelParameterivEXT2(@GLuint int texture, @GLenum int target, int level, @GLenum int pname, @OutParameter IntBuffer params); /* OpenGL 1.2: New 3D texture object commands replace "Tex" in name with "Texture" and adds initial "uint texture" parameter */ @Dependent("OpenGL12") void glTextureImage3DEXT(@GLuint int texture, @GLenum int target, int level, int internalformat, @GLsizei int width, @GLsizei int height, @GLsizei int depth, int border, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check(value = "GLChecks.calculateTexImage3DStorage(pixels, format, type, width, height, depth)", canBeNull = true) @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); @Dependent("OpenGL12") void glTextureSubImage3DEXT(@GLuint int texture, @GLenum int target, int level, int xoffset, int yoffset, int zoffset, @GLsizei int width, @GLsizei int height, @GLsizei int depth, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check("GLChecks.calculateImageStorage(pixels, format, type, width, 
height, depth)") @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); @Dependent("OpenGL12") void glCopyTextureSubImage3DEXT(@GLuint int texture, @GLenum int target, int level, int xoffset, int yoffset, int zoffset, int x, int y, @GLsizei int width, @GLsizei int height); /* OpenGL 1.2.1: New multitexture commands and queries prefix "Multi" before "Tex" and add an initial "enum texunit" parameter (to identify the texture unit */ @Dependent("OpenGL13") void glBindMultiTextureEXT(@GLenum int texunit, @GLenum int target, @GLuint int texture); @Dependent("OpenGL13") @DeprecatedGL void glMultiTexCoordPointerEXT(@GLenum int texunit, int size, @AutoType("pointer") @GLenum int type, @GLsizei int stride, @BufferObject(BufferKind.ArrayVBO) @Check @Const @GLfloat @GLdouble Buffer pointer); @Dependent("OpenGL13") @DeprecatedGL void glMultiTexEnvfEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, float param); @Dependent("OpenGL13") @StripPostfix("params") @DeprecatedGL void glMultiTexEnvfvEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @Const FloatBuffer params); @Dependent("OpenGL13") @DeprecatedGL void glMultiTexEnviEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, int param); @Dependent("OpenGL13") @StripPostfix("params") @DeprecatedGL void glMultiTexEnvivEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @Const IntBuffer params); @Dependent("OpenGL13") @DeprecatedGL void glMultiTexGendEXT(@GLenum int texunit, @GLenum int coord, @GLenum int pname, double param); @Dependent("OpenGL13") @StripPostfix("params") @DeprecatedGL void glMultiTexGendvEXT(@GLenum int texunit, @GLenum int coord, @GLenum int pname, @Check("4") @Const DoubleBuffer params); @Dependent("OpenGL13") @DeprecatedGL void glMultiTexGenfEXT(@GLenum int texunit, @GLenum int coord, @GLenum int pname, float param); @Dependent("OpenGL13") @StripPostfix("params") @DeprecatedGL void glMultiTexGenfvEXT(@GLenum int texunit, 
@GLenum int coord, @GLenum int pname, @Check("4") @Const FloatBuffer params); @Dependent("OpenGL13") @DeprecatedGL void glMultiTexGeniEXT(@GLenum int texunit, @GLenum int coord, @GLenum int pname, int param); @Dependent("OpenGL13") @StripPostfix("params") @DeprecatedGL void glMultiTexGenivEXT(@GLenum int texunit, @GLenum int coord, @GLenum int pname, @Check("4") @Const IntBuffer params); @Dependent("OpenGL13") @StripPostfix("params") @DeprecatedGL void glGetMultiTexEnvfvEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @OutParameter FloatBuffer params); @Dependent("OpenGL13") @StripPostfix("params") @DeprecatedGL void glGetMultiTexEnvivEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @OutParameter IntBuffer params); @Dependent("OpenGL13") @StripPostfix("params") @DeprecatedGL void glGetMultiTexGendvEXT(@GLenum int texunit, @GLenum int coord, @GLenum int pname, @Check("4") @OutParameter DoubleBuffer params); @Dependent("OpenGL13") @StripPostfix("params") @DeprecatedGL void glGetMultiTexGenfvEXT(@GLenum int texunit, @GLenum int coord, @GLenum int pname, @Check("4") @OutParameter FloatBuffer params); @Dependent("OpenGL13") @StripPostfix("params") @DeprecatedGL void glGetMultiTexGenivEXT(@GLenum int texunit, @GLenum int coord, @GLenum int pname, @Check("4") @OutParameter IntBuffer params); @Dependent("OpenGL13") void glMultiTexParameteriEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, int param); @Dependent("OpenGL13") @StripPostfix("param") void glMultiTexParameterivEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @Const IntBuffer param); @Dependent("OpenGL13") void glMultiTexParameterfEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, float param); @Dependent("OpenGL13") @StripPostfix("param") void glMultiTexParameterfvEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @Const FloatBuffer param); @Dependent("OpenGL13") void 
glMultiTexImage1DEXT(@GLenum int texunit, @GLenum int target, int level, int internalformat, @GLsizei int width, int border, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check(value = "GLChecks.calculateTexImage1DStorage(pixels, format, type, width)", canBeNull = true) @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); @Dependent("OpenGL13") void glMultiTexImage2DEXT(@GLenum int texunit, @GLenum int target, int level, int internalformat, @GLsizei int width, @GLsizei int height, int border, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check(value = "GLChecks.calculateTexImage2DStorage(pixels, format, type, width, height)", canBeNull = true) @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); @Dependent("OpenGL13") void glMultiTexSubImage1DEXT(@GLenum int texunit, @GLenum int target, int level, int xoffset, @GLsizei int width, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check("GLChecks.calculateImageStorage(pixels, format, type, width, 1, 1)") @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); @Dependent("OpenGL13") void glMultiTexSubImage2DEXT(@GLenum int texunit, @GLenum int target, int level, int xoffset, int yoffset, @GLsizei int width, @GLsizei int height, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check("GLChecks.calculateImageStorage(pixels, format, type, width, height, 1)") @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); @Dependent("OpenGL13") void glCopyMultiTexImage1DEXT(@GLenum int texunit, @GLenum int target, int level, @GLenum int internalformat, int x, int y, @GLsizei int width, int border); @Dependent("OpenGL13") void glCopyMultiTexImage2DEXT(@GLenum int texunit, @GLenum int target, int level, @GLenum int internalformat, int x, int y, @GLsizei int width, @GLsizei int height, int border); @Dependent("OpenGL13") void glCopyMultiTexSubImage1DEXT(@GLenum int texunit, @GLenum 
int target, int level, int xoffset, int x, int y, @GLsizei int width); @Dependent("OpenGL13") void glCopyMultiTexSubImage2DEXT(@GLenum int texunit, @GLenum int target, int level, int xoffset, int yoffset, int x, int y, @GLsizei int width, @GLsizei int height); @Dependent("OpenGL13") void glGetMultiTexImageEXT(@GLenum int texunit, @GLenum int target, int level, @GLenum int format, @GLenum int type, @OutParameter @BufferObject(BufferKind.PackPBO) @Check("GLChecks.calculateImageStorage(pixels, format, type, 1, 1, 1)") @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); @Dependent("OpenGL13") @StripPostfix("params") void glGetMultiTexParameterfvEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @OutParameter FloatBuffer params); @Alternate("glGetMultiTexParameterfvEXT") @GLreturn("params") @StripPostfix(value = "params", hasPostfix = false) void glGetMultiTexParameterfvEXT2(@GLenum int texunit, @GLenum int target, @GLenum int pname, @OutParameter FloatBuffer params); @Dependent("OpenGL13") @StripPostfix("params") void glGetMultiTexParameterivEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @OutParameter IntBuffer params); @Alternate("glGetMultiTexParameterivEXT") @GLreturn("params") @Dependent("OpenGL13") @StripPostfix(value = "params", hasPostfix = false) void glGetMultiTexParameterivEXT2(@GLenum int texunit, @GLenum int target, @GLenum int pname, @OutParameter IntBuffer params); @Dependent("OpenGL13") @StripPostfix("params") void glGetMultiTexLevelParameterfvEXT(@GLenum int texunit, @GLenum int target, int level, @GLenum int pname, @Check("4") @OutParameter FloatBuffer params); @Alternate("glGetMultiTexLevelParameterfvEXT") @GLreturn("params") @Dependent("OpenGL13") @StripPostfix(value = "params", hasPostfix = false) void glGetMultiTexLevelParameterfvEXT2(@GLenum int texunit, @GLenum int target, int level, @GLenum int pname, @OutParameter FloatBuffer params); @Dependent("OpenGL13") @StripPostfix("params") 
void glGetMultiTexLevelParameterivEXT(@GLenum int texunit, @GLenum int target, int level, @GLenum int pname, @Check("4") @OutParameter IntBuffer params); @Alternate("glGetMultiTexLevelParameterivEXT") @GLreturn("params") @Dependent("OpenGL13") @StripPostfix(value = "params", hasPostfix = false) void glGetMultiTexLevelParameterivEXT2(@GLenum int texunit, @GLenum int target, int level, @GLenum int pname, @OutParameter IntBuffer params); @Dependent("OpenGL13") void glMultiTexImage3DEXT(@GLenum int texunit, @GLenum int target, int level, int internalformat, @GLsizei int width, @GLsizei int height, @GLsizei int depth, int border, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check(value = "GLChecks.calculateTexImage3DStorage(pixels, format, type, width, height, depth)", canBeNull = true) @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); @Dependent("OpenGL13") void glMultiTexSubImage3DEXT(@GLenum int texunit, @GLenum int target, int level, int xoffset, int yoffset, int zoffset, @GLsizei int width, @GLsizei int height, @GLsizei int depth, @GLenum int format, @GLenum int type, @BufferObject(BufferKind.UnpackPBO) @Check("GLChecks.calculateImageStorage(pixels, format, type, width, height, depth)") @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer pixels); @Dependent("OpenGL13") void glCopyMultiTexSubImage3DEXT(@GLenum int texunit, @GLenum int target, int level, int xoffset, int yoffset, int zoffset, int x, int y, @GLsizei int width, @GLsizei int height); /* OpenGL 1.2.1: New indexed texture commands and queries append "Indexed" to name and add "uint index" parameter (to identify the texture unit index) after state name parameters (if any) and before state value parameters */ @Dependent("OpenGL13") @DeprecatedGL void glEnableClientStateIndexedEXT(@GLenum int array, @GLuint int index); @Dependent("OpenGL13") @DeprecatedGL void glDisableClientStateIndexedEXT(@GLenum int array, @GLuint int index); /* OpenGL 3.0: New indexed 
texture commands and queries append "i" to name and add "uint index" parameter (to identify the texture unit index) after state name parameters (if any) and before state value parameters */ @Optional(reason = "AMD does not expose this (last driver checked: 14.7)") @Dependent("OpenGL30") void glEnableClientStateiEXT(@GLenum int array, @GLuint int index); @Optional(reason = "AMD does not expose this (last driver checked: 14.7)") @Dependent("OpenGL30") void glDisableClientStateiEXT(@GLenum int array, @GLuint int index); /* OpenGL 1.2.1: New indexed generic queries (added for indexed texture state) append "Indexed" to name and add "uint index" parameter (to identify the texture unit) after state name parameters (if any) and before state value parameters */ @Dependent("OpenGL13") @StripPostfix("params") void glGetFloatIndexedvEXT(@GLenum int pname, @GLuint int index, @OutParameter @Check("16") FloatBuffer params); @Alternate("glGetFloatIndexedvEXT") @GLreturn("params") @Dependent("OpenGL13") @StripPostfix("params") void glGetFloatIndexedvEXT2(@GLenum int pname, @GLuint int index, @OutParameter FloatBuffer params); @Dependent("OpenGL13") @StripPostfix(value = "params", hasPostfix = false) void glGetDoubleIndexedvEXT(@GLenum int pname, @GLuint int index, @OutParameter @Check("16") DoubleBuffer params); @Alternate("glGetDoubleIndexedvEXT") @GLreturn("params") @Dependent("OpenGL13") @StripPostfix(value = "params", hasPostfix = false) void glGetDoubleIndexedvEXT2(@GLenum int pname, @GLuint int index, @OutParameter DoubleBuffer params); @Dependent("OpenGL13") @StripPostfix("params") void glGetPointerIndexedvEXT(@GLenum int pname, @GLuint int index, @Result @GLvoid ByteBuffer params); /* OpenGL 3.0: New indexed generic queries (added for indexed texture state) replace "v" for "i_v" to name and add "uint index" parameter (to identify the texture unit) after state name parameters (if any) and before state value parameters */ @Optional(reason = "AMD does not expose this (last 
driver checked: 14.7)") @Dependent("OpenGL30") @StripPostfix("params") void glGetFloati_vEXT(@GLenum int pname, @GLuint int index, @OutParameter @Check("16") FloatBuffer params); @Alternate("glGetFloati_vEXT") @GLreturn("params") @Dependent("OpenGL30") @StripPostfix("params") void glGetFloati_vEXT2(@GLenum int pname, @GLuint int index, @OutParameter FloatBuffer params); @Optional(reason = "AMD does not expose this (last driver checked: 14.7)") @Dependent("OpenGL30") @StripPostfix("params") void glGetDoublei_vEXT(@GLenum int pname, @GLuint int index, @OutParameter @Check("16") DoubleBuffer params); @Alternate("glGetDoublei_vEXT") @GLreturn("params") @Dependent("OpenGL30") @StripPostfix("params") void glGetDoublei_vEXT2(@GLenum int pname, @GLuint int index, @OutParameter DoubleBuffer params); @Optional(reason = "AMD does not expose this (last driver checked: 14.7)") @Dependent("OpenGL30") @StripPostfix(value = "params", postfix = "i_v") void glGetPointeri_vEXT(@GLenum int pname, @GLuint int index, @Result @GLvoid ByteBuffer params); /* OpenGL 1.2.1: Extend the functionality of these EXT_draw_buffers2 commands and queries for multitexture TODO: Why 1.2.1 and not EXT_draw_buffers2? 
*/ @Reuse("EXTDrawBuffers2") @Dependent("OpenGL13") void glEnableIndexedEXT(@GLenum int cap, @GLuint int index); @Reuse("EXTDrawBuffers2") @Dependent("OpenGL13") void glDisableIndexedEXT(@GLenum int cap, @GLuint int index); @Reuse("EXTDrawBuffers2") @Dependent("OpenGL13") boolean glIsEnabledIndexedEXT(@GLenum int cap, @GLuint int index); @Reuse("EXTDrawBuffers2") @Dependent("OpenGL13") @StripPostfix("params") void glGetIntegerIndexedvEXT(@GLenum int pname, @GLuint int index, @OutParameter @Check("16") IntBuffer params); @Reuse("EXTDrawBuffers2") @Alternate("glGetIntegerIndexedvEXT") @GLreturn("params") @Dependent("OpenGL13") @StripPostfix("params") void glGetIntegerIndexedvEXT2(@GLenum int pname, @GLuint int index, @OutParameter IntBuffer params); @Reuse("EXTDrawBuffers2") @Dependent("OpenGL13") @StripPostfix("params") void glGetBooleanIndexedvEXT(@GLenum int pname, @GLuint int index, @OutParameter @Check("4") @GLboolean ByteBuffer params); @Reuse("EXTDrawBuffers2") @Alternate("glGetBooleanIndexedvEXT") @GLreturn("params") @Dependent("OpenGL13") @StripPostfix("params") void glGetBooleanIndexedvEXT2(@GLenum int pname, @GLuint int index, @OutParameter @GLboolean ByteBuffer params); /* ARB_vertex_program: New program commands and queries add "Named" prefix to name and adds initial "uint program" parameter */ @Dependent("GL_ARB_vertex_program") void glNamedProgramStringEXT(@GLuint int program, @GLenum int target, @GLenum int format, @AutoSize("string") @GLsizei int len, @Const @GLvoid Buffer string); @Alternate("glNamedProgramStringEXT") @Dependent("GL_ARB_vertex_program") void glNamedProgramStringEXT(@GLuint int program, @GLenum int target, @GLenum int format, @Constant("string.length()") @GLsizei int length, CharSequence string); @Dependent("GL_ARB_vertex_program") void glNamedProgramLocalParameter4dEXT(@GLuint int program, @GLenum int target, @GLuint int index, double x, double y, double z, double w); @Dependent("GL_ARB_vertex_program") @StripPostfix("params") void 
glNamedProgramLocalParameter4dvEXT(@GLuint int program, @GLenum int target, @GLuint int index, @Const @Check("4") DoubleBuffer params); @Dependent("GL_ARB_vertex_program") void glNamedProgramLocalParameter4fEXT(@GLuint int program, @GLenum int target, @GLuint int index, float x, float y, float z, float w); @Dependent("GL_ARB_vertex_program") @StripPostfix("params") void glNamedProgramLocalParameter4fvEXT(@GLuint int program, @GLenum int target, @GLuint int index, @Const @Check("4") FloatBuffer params); @Dependent("GL_ARB_vertex_program") @StripPostfix("params") void glGetNamedProgramLocalParameterdvEXT(@GLuint int program, @GLenum int target, @GLuint int index, @OutParameter @Check("4") DoubleBuffer params); @Dependent("GL_ARB_vertex_program") @StripPostfix("params") void glGetNamedProgramLocalParameterfvEXT(@GLuint int program, @GLenum int target, @GLuint int index, @OutParameter @Check("4") FloatBuffer params); @Dependent("GL_ARB_vertex_program") @StripPostfix("params") void glGetNamedProgramivEXT(@GLuint int program, @GLenum int target, @GLenum int pname, @OutParameter @Check("4") IntBuffer params); @Alternate("glGetNamedProgramivEXT") @GLreturn("params") @Dependent("GL_ARB_vertex_program") @StripPostfix("params") void glGetNamedProgramivEXT2(@GLuint int program, @GLenum int target, @GLenum int pname, @OutParameter IntBuffer params); @Dependent("GL_ARB_vertex_program") void glGetNamedProgramStringEXT(@GLuint int program, @GLenum int target, @GLenum int pname, @OutParameter @Check @GLvoid ByteBuffer string); @Alternate("glGetNamedProgramStringEXT") @Code("\t\tint programLength = glGetNamedProgramEXT(program, target, ARBProgram.GL_PROGRAM_LENGTH_ARB);") @GLreturn(value = "paramString", maxLength = "programLength", forceMaxLength = true) void glGetNamedProgramStringEXT2(@GLuint int program, @GLenum int target, @GLenum int pname, @OutParameter @GLchar ByteBuffer paramString); /* OpenGL 1.3: New compressed texture object commands replace "Tex" in name with "Texture" 
and add initial "uint texture" parameter */

	// Texture-object (DSA) variants of the compressed-texture commands: the texture is named
	// explicitly instead of being bound to a target. imageSize is derived from the data buffer.
	@Dependent("OpenGL13")
	void glCompressedTextureImage3DEXT(@GLuint int texture, @GLenum int target, int level, @GLenum int internalformat, @GLsizei int width, @GLsizei int height, @GLsizei int depth, int border, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glCompressedTextureImage2DEXT(@GLuint int texture, @GLenum int target, int level, @GLenum int internalformat, @GLsizei int width, @GLsizei int height, int border, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glCompressedTextureImage1DEXT(@GLuint int texture, @GLenum int target, int level, @GLenum int internalformat, @GLsizei int width, int border, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glCompressedTextureSubImage3DEXT(@GLuint int texture, @GLenum int target, int level, int xoffset, int yoffset, int zoffset, @GLsizei int width, @GLsizei int height, @GLsizei int depth, @GLenum int format, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glCompressedTextureSubImage2DEXT(@GLuint int texture, @GLenum int target, int level, int xoffset, int yoffset, @GLsizei int width, @GLsizei int height, @GLenum int format, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glCompressedTextureSubImage1DEXT(@GLuint int texture, @GLenum int target, int level, int xoffset, @GLsizei int width, @GLenum int format, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glGetCompressedTextureImageEXT(@GLuint int texture, @GLenum int target, int level, @OutParameter @BufferObject(BufferKind.PackPBO) @Check @GLbyte @GLshort @GLint Buffer img);

	/* OpenGL 1.3: New multitexture compressed texture commands and queries prefix "Multi" before "Tex" and add an initial "enum texunit" parameter (to identify the texture unit) */

	@Dependent("OpenGL13")
	void glCompressedMultiTexImage3DEXT(@GLenum int texunit, @GLenum int target, int level, @GLenum int internalformat, @GLsizei int width, @GLsizei int height, @GLsizei int depth, int border, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glCompressedMultiTexImage2DEXT(@GLenum int texunit, @GLenum int target, int level, @GLenum int internalformat, @GLsizei int width, @GLsizei int height, int border, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glCompressedMultiTexImage1DEXT(@GLenum int texunit, @GLenum int target, int level, @GLenum int internalformat, @GLsizei int width, int border, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glCompressedMultiTexSubImage3DEXT(@GLenum int texunit, @GLenum int target, int level, int xoffset, int yoffset, int zoffset, @GLsizei int width, @GLsizei int height, @GLsizei int depth, @GLenum int format, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glCompressedMultiTexSubImage2DEXT(@GLenum int texunit, @GLenum int target, int level, int xoffset, int yoffset, @GLsizei int width, @GLsizei int height, @GLenum int format, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glCompressedMultiTexSubImage1DEXT(@GLenum int texunit, @GLenum int target, int level, int xoffset, @GLsizei int width, @GLenum int format, @AutoSize("data") @GLsizei int imageSize, @BufferObject(BufferKind.UnpackPBO) @Check @Const @GLvoid ByteBuffer data);

	@Dependent("OpenGL13")
	void glGetCompressedMultiTexImageEXT(@GLenum int texunit, @GLenum int target, int level, @OutParameter @BufferObject(BufferKind.PackPBO) @Check @GLbyte @GLshort @GLint Buffer img);

	/* OpenGL 1.3: New transpose matrix commands add "Matrix" suffix to name, drops "Matrix" suffix from name, and add initial "enum matrixMode" parameter */

	@Dependent("OpenGL13")
	@StripPostfix("m")
	@DeprecatedGL
	void glMatrixLoadTransposefEXT(@GLenum int matrixMode, @Check("16") @Const FloatBuffer m);

	@Dependent("OpenGL13")
	@StripPostfix("m")
	@DeprecatedGL
	void glMatrixLoadTransposedEXT(@GLenum int matrixMode, @Check("16") @Const DoubleBuffer m);

	@Dependent("OpenGL13")
	@StripPostfix("m")
	@DeprecatedGL
	void glMatrixMultTransposefEXT(@GLenum int matrixMode, @Check("16") @Const FloatBuffer m);

	@Dependent("OpenGL13")
	@StripPostfix("m")
	@DeprecatedGL
	void glMatrixMultTransposedEXT(@GLenum int matrixMode, @Check("16") @Const DoubleBuffer m);

	/* OpenGL 1.5: New buffer commands and queries replace "Buffer" with "NamedBuffer" in name and replace "enum target" parameter with "uint buffer" */

	@Dependent("OpenGL15")
	@GenerateAutos
	void glNamedBufferDataEXT(@GLuint int buffer, @AutoSize("data") @GLsizeiptr long size, @Check @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer data, @GLenum int usage);

	@Dependent("OpenGL15")
	void glNamedBufferSubDataEXT(@GLuint int buffer, @GLintptr long offset, @AutoSize("data") @GLsizeiptr long size, @Check @Const @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer data);

	/**
	 * glMapNamedBufferEXT maps a GL buffer object to a ByteBuffer. The old_buffer argument can be null,
	 * in which case a new ByteBuffer will be created, pointing to the returned memory. If old_buffer is non-null,
	 * it will be returned if it points to the same mapped memory and has the same capacity as the buffer object,
	 * otherwise a new ByteBuffer is created. That way, an application will normally use glMapNamedBufferEXT like this:
	 * <p/>
	 * ByteBuffer mapped_buffer; mapped_buffer = glMapNamedBufferEXT(..., ..., null); ... // Another map on the same buffer mapped_buffer = glMapNamedBufferEXT(..., ..., mapped_buffer);
	 * <p/>
	 * Only ByteBuffers returned from this method are to be passed as the old_buffer argument. User-created ByteBuffers cannot be reused.
	 * <p/>
	 * The version of this method without an explicit length argument calls glGetNamedBufferParameterEXT internally to
	 * retrieve the current buffer object size, which may cause a pipeline flush and reduce application performance.
	 * <p/>
	 * The version of this method with an explicit length argument is a fast alternative to the one without. No GL call
	 * is made to retrieve the buffer object size, so the user is responsible for tracking and using the appropriate length.<br>
	 * Security warning: The length argument should match the buffer object size. Reading from or writing to outside
	 * the memory region that corresponds to the mapped buffer object will cause native crashes.
	 *
	 * @param length the length of the mapped memory in bytes.
	 * @param old_buffer A ByteBuffer. If this argument points to the same address and has the same capacity as the new mapping, it will be returned and no new buffer will be created.
	 *
	 * @return A ByteBuffer representing the mapped buffer memory.
	 */
	@Dependent("OpenGL15")
	@CachedResult
	@GLvoid
	@AutoSize("glGetNamedBufferParameterEXT(buffer, GL15.GL_BUFFER_SIZE)")
	ByteBuffer glMapNamedBufferEXT(@GLuint int buffer, @GLenum int access);

	@Dependent("OpenGL15")
	boolean glUnmapNamedBufferEXT(@GLuint int buffer);

	@Dependent("OpenGL15")
	@StripPostfix("params")
	void glGetNamedBufferParameterivEXT(@GLuint int buffer, @GLenum int pname, @OutParameter @Check("4") IntBuffer params);

	@Alternate("glGetNamedBufferParameterivEXT")
	@GLreturn("params")
	@Dependent("OpenGL15")
	@StripPostfix("params")
	void glGetNamedBufferParameterivEXT2(@GLuint int buffer, @GLenum int pname, @OutParameter IntBuffer params);

	@Dependent("OpenGL15")
	@StripPostfix("params")
	@AutoSize("glGetNamedBufferParameterEXT(buffer, GL15.GL_BUFFER_SIZE)")
	void glGetNamedBufferPointervEXT(@GLuint int buffer, @GLenum int pname, @OutParameter @Result @GLvoid ByteBuffer params);

	@Dependent("OpenGL15")
	void glGetNamedBufferSubDataEXT(@GLuint int buffer, @GLintptr long offset, @AutoSize("data") @GLsizeiptr long size, @OutParameter @Check @GLbyte @GLshort @GLint @GLfloat @GLdouble Buffer data);

	/* OpenGL 2.0: New uniform commands add "Program" prefix to name and add initial "uint program" parameter */

	@Dependent("OpenGL20")
	void glProgramUniform1fEXT(@GLuint int program, int location, float v0);

	@Dependent("OpenGL20")
	void glProgramUniform2fEXT(@GLuint int program, int location, float v0, float v1);

	@Dependent("OpenGL20")
	void glProgramUniform3fEXT(@GLuint int program, int location, float v0, float v1, float v2);

	@Dependent("OpenGL20")
	void glProgramUniform4fEXT(@GLuint int program, int location, float v0, float v1, float v2, float v3);

	@Dependent("OpenGL20")
	void glProgramUniform1iEXT(@GLuint int program, int location, int v0);

	@Dependent("OpenGL20")
	void glProgramUniform2iEXT(@GLuint int program, int location, int v0, int v1);

	@Dependent("OpenGL20")
	void glProgramUniform3iEXT(@GLuint int program, int location, int v0, int v1, int v2);

	@Dependent("OpenGL20")
	void glProgramUniform4iEXT(@GLuint int program, int location, int v0, int v1, int v2, int v3);

	// Vector forms: the generator derives count from the buffer size (>> 1, / 3, >> 2 for vec2/3/4).
	@Dependent("OpenGL20")
	@StripPostfix("value")
	void glProgramUniform1fvEXT(@GLuint int program, int location, @AutoSize(value = "value") @GLsizei int count, @Const FloatBuffer value);

	@Dependent("OpenGL20")
	@StripPostfix("value")
	void glProgramUniform2fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " >> 1") @GLsizei int count, @Const FloatBuffer value);

	@Dependent("OpenGL20")
	@StripPostfix("value")
	void glProgramUniform3fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " / 3") @GLsizei int count, @Const FloatBuffer value);

	@Dependent("OpenGL20")
	@StripPostfix("value")
	void glProgramUniform4fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " >> 2") @GLsizei int count, @Const FloatBuffer value);

	@Dependent("OpenGL20")
	@StripPostfix("value")
	void glProgramUniform1ivEXT(@GLuint int program, int location, @AutoSize(value = "value") @GLsizei int count, @Const IntBuffer value);

	@Dependent("OpenGL20")
	@StripPostfix("value")
	void glProgramUniform2ivEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " >> 1") @GLsizei int count, @Const IntBuffer value);

	@Dependent("OpenGL20")
	@StripPostfix("value")
	void glProgramUniform3ivEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " / 3") @GLsizei int count, @Const IntBuffer value);

	@Dependent("OpenGL20")
	@StripPostfix("value")
	void glProgramUniform4ivEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " >> 2") @GLsizei int count, @Const IntBuffer value);

	@Dependent("OpenGL20")
	@StripPostfix("value")
	void glProgramUniformMatrix2fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " >> 2") @GLsizei int count, boolean transpose, @Const FloatBuffer value);

	@Dependent("OpenGL20")
	@StripPostfix("value")
	void glProgramUniformMatrix3fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " / (3 * 3)") @GLsizei int count, boolean transpose, @Const FloatBuffer value);

	@Dependent("OpenGL20")
	@StripPostfix("value")
	void glProgramUniformMatrix4fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " >> 4") @GLsizei int count, boolean transpose, @Const FloatBuffer value);

	/* OpenGL 2.1: New uniform matrix commands add "Program" prefix to name and add initial "uint program" parameter */

	@Dependent("OpenGL21")
	@StripPostfix("value")
	void glProgramUniformMatrix2x3fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " / (2 * 3)") @GLsizei int count, boolean transpose, @Const FloatBuffer value);

	@Dependent("OpenGL21")
	@StripPostfix("value")
	void glProgramUniformMatrix3x2fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " / (3 * 2)") @GLsizei int count, boolean transpose, @Const FloatBuffer value);

	@Dependent("OpenGL21")
	@StripPostfix("value")
	void glProgramUniformMatrix2x4fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " >> 3") @GLsizei int count, boolean transpose, @Const FloatBuffer value);

	@Dependent("OpenGL21")
	@StripPostfix("value")
	void glProgramUniformMatrix4x2fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " >> 3") @GLsizei int count, boolean transpose, @Const FloatBuffer value);

	@Dependent("OpenGL21")
	@StripPostfix("value")
	void glProgramUniformMatrix3x4fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " / (3 * 4)") @GLsizei int count, boolean transpose, @Const FloatBuffer value);

	@Dependent("OpenGL21")
	@StripPostfix("value")
	void glProgramUniformMatrix4x3fvEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " / (4 * 3)") @GLsizei int count, boolean transpose, @Const FloatBuffer value);

	/* EXT_texture_buffer_object: New texture buffer object command replaces "Tex" in name with "Texture" and adds initial "uint texture" parameter */

	@Dependent("GL_EXT_texture_buffer_object")
	void glTextureBufferEXT(@GLuint int texture, @GLenum int target, @GLenum int internalformat, @GLuint int buffer);

	/* EXT_texture_buffer_object: New multitexture texture buffer command prefixes "Multi" before "Tex" and add an initial "enum texunit" parameter (to identify the texture unit) */

	@Dependent("GL_EXT_texture_buffer_object")
	void glMultiTexBufferEXT(@GLenum int texunit, @GLenum int target, @GLenum int internalformat, @GLuint int buffer);

	/* EXT_texture_integer: New integer texture object commands and queries replace "Tex" in name with "Texture" and add initial "uint texture" parameter */

	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("params")
	void glTextureParameterIivEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, @Check("4") @Const IntBuffer params);

	@Alternate("glTextureParameterIivEXT")
	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("param")
	void glTextureParameterIivEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, @Constant(value = "APIUtil.getInt(caps, param)", keepParam = true) int param);

	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("params")
	void glTextureParameterIuivEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, @Check("4") @Const @GLuint IntBuffer params);

	@Alternate("glTextureParameterIuivEXT")
	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("param")
	void glTextureParameterIuivEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, @Constant(value = "APIUtil.getInt(caps, param)", keepParam = true) @GLuint int param);

	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("params")
	void glGetTextureParameterIivEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, @Check("4") @OutParameter IntBuffer params);

	@Alternate("glGetTextureParameterIivEXT")
	@GLreturn("params")
	@Dependent("GL_EXT_texture_integer")
	@StripPostfix(value = "params", hasPostfix = false)
	void glGetTextureParameterIivEXT2(@GLuint int texture, @GLenum int target, @GLenum int pname, @OutParameter IntBuffer params);

	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("params")
	void glGetTextureParameterIuivEXT(@GLuint int texture, @GLenum int target, @GLenum int pname, @Check("4") @OutParameter @GLuint IntBuffer params);

	@Alternate("glGetTextureParameterIuivEXT")
	@GLreturn("params")
	@Dependent("GL_EXT_texture_integer")
	@StripPostfix(value = "params", hasPostfix = false)
	void glGetTextureParameterIuivEXT2(@GLuint int texture, @GLenum int target, @GLenum int pname, @OutParameter @GLuint IntBuffer params);

	/* EXT_texture_integer: New multitexture integer texture commands and queries prefix "Multi" before "Tex" and add an initial "enum texunit" parameter (to identify the texture unit) */

	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("params")
	void glMultiTexParameterIivEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @Const IntBuffer params);

	@Alternate("glMultiTexParameterIivEXT")
	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("param")
	void glMultiTexParameterIivEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Constant(value = "APIUtil.getInt(caps, param)", keepParam = true) int param);

	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("params")
	void glMultiTexParameterIuivEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @Const @GLuint IntBuffer params);

	@Alternate("glMultiTexParameterIuivEXT")
	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("param")
	void glMultiTexParameterIuivEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Constant(value = "APIUtil.getInt(caps, param)", keepParam = true) int param);

	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("params")
	void glGetMultiTexParameterIivEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @OutParameter IntBuffer params);

	@Alternate("glGetMultiTexParameterIivEXT")
	@GLreturn("params")
	@Dependent("GL_EXT_texture_integer")
	@StripPostfix(value = "params", hasPostfix = false)
	void glGetMultiTexParameterIivEXT2(@GLenum int texunit, @GLenum int target, @GLenum int pname, @OutParameter IntBuffer params);

	@Dependent("GL_EXT_texture_integer")
	@StripPostfix("params")
	void glGetMultiTexParameterIuivEXT(@GLenum int texunit, @GLenum int target, @GLenum int pname, @Check("4") @OutParameter @GLuint IntBuffer params);

	@Alternate("glGetMultiTexParameterIuivEXT")
	@GLreturn("params")
	@Dependent("GL_EXT_texture_integer")
	@StripPostfix(value = "params", hasPostfix = false)
	void glGetMultiTexParameterIuivEXT2(@GLenum int texunit, @GLenum int target, @GLenum int pname, @OutParameter @GLuint IntBuffer params);

	/* EXT_gpu_shader4: New integer uniform commands add "Program" prefix to name and add initial "uint program" parameter */

	@Dependent("GL_EXT_gpu_shader4")
	void glProgramUniform1uiEXT(@GLuint int program, int location, @GLuint int v0);

	@Dependent("GL_EXT_gpu_shader4")
	void glProgramUniform2uiEXT(@GLuint int program, int location, @GLuint int v0, @GLuint int v1);

	@Dependent("GL_EXT_gpu_shader4")
	void glProgramUniform3uiEXT(@GLuint int program, int location, @GLuint int v0, @GLuint int v1, @GLuint int v2);

	@Dependent("GL_EXT_gpu_shader4")
	void glProgramUniform4uiEXT(@GLuint int program, int location, @GLuint int v0, @GLuint int v1, @GLuint int v2, @GLuint int v3);

	@Dependent("GL_EXT_gpu_shader4")
	@StripPostfix("value")
	void glProgramUniform1uivEXT(@GLuint int program, int location, @AutoSize(value = "value") @GLsizei int count, @Const @GLuint IntBuffer value);

	@Dependent("GL_EXT_gpu_shader4")
	@StripPostfix("value")
	void glProgramUniform2uivEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " >> 1") @GLsizei int count, @Const @GLuint IntBuffer value);

	@Dependent("GL_EXT_gpu_shader4")
	@StripPostfix("value")
	void glProgramUniform3uivEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " / 3") @GLsizei int count, @Const @GLuint IntBuffer value);

	@Dependent("GL_EXT_gpu_shader4")
	@StripPostfix("value")
	void glProgramUniform4uivEXT(@GLuint int program, int location, @AutoSize(value = "value", expression = " >> 2") @GLsizei int count, @Const @GLuint IntBuffer value);

	/* EXT_gpu_program_parameters: New program command adds "Named" prefix to name and adds "uint program" parameter */

	@Dependent("GL_EXT_gpu_program_parameters")
	@StripPostfix("params")
	void glNamedProgramLocalParameters4fvEXT(@GLuint int program, @GLenum int target, @GLuint int index, @AutoSize(value = "params", expression = " >> 2") @GLsizei int count, @Const FloatBuffer params);

	/* NV_gpu_program4: New program commands and queries add "Named" prefix to name and replace "enum target" with "uint program" */

	@Dependent("GL_NV_gpu_program4")
	void glNamedProgramLocalParameterI4iEXT(@GLuint int program, @GLenum int target, @GLuint int index, int x, int y, int z, int w);

	@Dependent("GL_NV_gpu_program4")
	@StripPostfix("params")
	void glNamedProgramLocalParameterI4ivEXT(@GLuint int program, @GLenum int target, @GLuint int index, @Check("4") @Const IntBuffer params);

	@Dependent("GL_NV_gpu_program4")
	@StripPostfix("params")
	void glNamedProgramLocalParametersI4ivEXT(@GLuint int program, @GLenum int target, @GLuint int index, @AutoSize(value = "params", expression = " >> 2") @GLsizei int count, @Const IntBuffer params);

	@Dependent("GL_NV_gpu_program4")
	void glNamedProgramLocalParameterI4uiEXT(@GLuint int program, @GLenum int target, @GLuint int index, @GLuint int x, @GLuint int y, @GLuint int z, @GLuint int w);

	@Dependent("GL_NV_gpu_program4")
	@StripPostfix("params")
	void glNamedProgramLocalParameterI4uivEXT(@GLuint int program, @GLenum int target, @GLuint int index, @Check("4") @Const @GLuint IntBuffer params);

	@Dependent("GL_NV_gpu_program4")
	@StripPostfix("params")
	void glNamedProgramLocalParametersI4uivEXT(@GLuint int program, @GLenum int target, @GLuint int index, @AutoSize(value = "params", expression = " >> 2") @GLsizei int count, @Const @GLuint IntBuffer params);

	@Dependent("GL_NV_gpu_program4")
	@StripPostfix("params")
	void glGetNamedProgramLocalParameterIivEXT(@GLuint int program, @GLenum int target, @GLuint int index, @Check("4") @OutParameter IntBuffer params);

	@Dependent("GL_NV_gpu_program4")
	@StripPostfix("params")
	void glGetNamedProgramLocalParameterIuivEXT(@GLuint int program, @GLenum int target, @GLuint int index, @Check("4") @OutParameter @GLuint IntBuffer params);

	/* OpenGL 3.0: New renderbuffer commands add "Named" prefix to name and replace "enum target" with "uint renderbuffer" */

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	void glNamedRenderbufferStorageEXT(@GLuint int renderbuffer, @GLenum int internalformat, @GLsizei int width, @GLsizei int height);

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	@StripPostfix("params")
	void glGetNamedRenderbufferParameterivEXT(@GLuint int renderbuffer, @GLenum int pname, @Check("4") @OutParameter IntBuffer params);

	@Alternate("glGetNamedRenderbufferParameterivEXT")
	@GLreturn("params")
	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	@StripPostfix("params")
	void glGetNamedRenderbufferParameterivEXT2(@GLuint int renderbuffer, @GLenum int pname, @OutParameter IntBuffer params);

	/* EXT_framebuffer_multisample: New renderbuffer commands add "Named" prefix to name and replace "enum target" with "uint renderbuffer" */

	@Dependent("OpenGL30,GL_EXT_framebuffer_multisample")
	void glNamedRenderbufferStorageMultisampleEXT(@GLuint int renderbuffer, @GLsizei int samples, @GLenum int internalformat, @GLsizei int width, @GLsizei int height);

	/* NV_framebuffer_multisample_coverage: New renderbuffer commands add "Named" prefix to name and replace "enum target" with "uint renderbuffer" */

	@Dependent("GL_NV_framebuffer_multisample_coverage")
	void glNamedRenderbufferStorageMultisampleCoverageEXT(@GLuint int renderbuffer, @GLsizei int coverageSamples, @GLsizei int colorSamples, @GLenum int internalformat, @GLsizei int width, @GLsizei int height);
	/* OpenGL 3.0: New framebuffer commands add "Named" prefix to name and replace "enum target" with "uint framebuffer" */

	// DSA equivalents of the EXT_framebuffer_object commands: the "Named" forms take the
	// framebuffer object id directly instead of operating on the bound target.
	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	@GLenum
	int glCheckNamedFramebufferStatusEXT(@GLuint int framebuffer, @GLenum int target);

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	void glNamedFramebufferTexture1DEXT(@GLuint int framebuffer, @GLenum int attachment, @GLenum int textarget, @GLuint int texture, int level);

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	void glNamedFramebufferTexture2DEXT(@GLuint int framebuffer, @GLenum int attachment, @GLenum int textarget, @GLuint int texture, int level);

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	void glNamedFramebufferTexture3DEXT(@GLuint int framebuffer, @GLenum int attachment, @GLenum int textarget, @GLuint int texture, int level, int zoffset);

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	void glNamedFramebufferRenderbufferEXT(@GLuint int framebuffer, @GLenum int attachment, @GLenum int renderbuffertarget, @GLuint int renderbuffer);

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	@StripPostfix("params")
	void glGetNamedFramebufferAttachmentParameterivEXT(@GLuint int framebuffer, @GLenum int attachment, @GLenum int pname, @Check("4") @OutParameter IntBuffer params);

	@Alternate("glGetNamedFramebufferAttachmentParameterivEXT")
	@GLreturn("params")
	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	@StripPostfix("params")
	void glGetNamedFramebufferAttachmentParameterivEXT2(@GLuint int framebuffer, @GLenum int attachment, @GLenum int pname, @OutParameter IntBuffer params);

	/* OpenGL 3.0: New texture commands add "Texture" within name and replace "enum target" with "uint texture" */

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	void glGenerateTextureMipmapEXT(@GLuint int texture, @GLenum int target);

	/* OpenGL 3.0: New texture commands add "MultiTex" within name and replace "enum target" with "enum texunit" */

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	void glGenerateMultiTexMipmapEXT(@GLenum int texunit, @GLenum int target);

	/* OpenGL 3.0: New framebuffer commands */

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	void glFramebufferDrawBufferEXT(@GLuint int framebuffer, @GLenum int mode);

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	void glFramebufferDrawBuffersEXT(@GLuint int framebuffer, @AutoSize("bufs") @GLsizei int n, @Const @GLenum IntBuffer bufs);

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	void glFramebufferReadBufferEXT(@GLuint int framebuffer, @GLenum int mode);

	/* OpenGL 3.0: New framebuffer query */

	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	@StripPostfix("param")
	void glGetFramebufferParameterivEXT(@GLuint int framebuffer, @GLenum int pname, @Check("4") @OutParameter IntBuffer param);

	@Alternate("glGetFramebufferParameterivEXT")
	@GLreturn("param")
	@Dependent("OpenGL30,GL_EXT_framebuffer_object")
	@StripPostfix("param")
	void glGetFramebufferParameterivEXT2(@GLuint int framebuffer, @GLenum int pname, @OutParameter IntBuffer param);

	/* OpenGL 3.1: New buffer data copy command */

	@Dependent("OpenGL31,GL_ARB_copy_buffer")
	void glNamedCopyBufferSubDataEXT(@GLuint int readBuffer, @GLuint int writeBuffer, @GLintptr long readoffset, @GLintptr long writeoffset, @GLsizeiptr long size);

	/* EXT_geometry_shader4 or NV_geometry_program4: New framebuffer commands add "Named" prefix to name and replace "enum target" with "uint framebuffer" */

	@Dependent("GL_EXT_geometry_shader4,GL_NV_geometry_program4")
	void glNamedFramebufferTextureEXT(@GLuint int framebuffer, @GLenum int attachment, @GLuint int texture, int level);

	@Dependent("GL_EXT_geometry_shader4,GL_NV_geometry_program4")
	void glNamedFramebufferTextureLayerEXT(@GLuint int framebuffer, @GLenum int attachment, @GLuint int texture, int level, int layer);

	@Dependent("GL_EXT_geometry_shader4,GL_NV_geometry_program4")
	void glNamedFramebufferTextureFaceEXT(@GLuint int framebuffer, @GLenum int attachment, @GLuint int texture, int level, @GLenum int face);

	/* NV_explicit_multisample: New texture renderbuffer object command replaces "Tex" in name with "Texture" and add initial "uint texture" parameter */

	@Dependent("GL_NV_explicit_multisample")
	void glTextureRenderbufferEXT(@GLuint int texture, @GLenum int target, @GLuint int renderbuffer);

	/* NV_explicit_multisample: New multitexture texture renderbuffer command prefixes "Multi" before "Tex" and add an initial "enum texunit" parameter (to identify the texture unit) */

	@Dependent("GL_NV_explicit_multisample")
	void glMultiTexRenderbufferEXT(@GLenum int texunit, @GLenum int target, @GLuint int renderbuffer);

	/* OpenGL 3.0: New vertex array specification commands for vertex array objects prefix "VertexArray", add initial "uint vaobj" and "uint buffer" parameters, change "Pointer" suffix to "Offset", and change the final parameter from "const void *" to "intptr offset" */

	@Dependent("OpenGL30")
	@DeprecatedGL
	void glVertexArrayVertexOffsetEXT(@GLuint int vaobj, @GLuint int buffer, int size, @GLenum int type, @GLsizei int stride, @GLintptr long offset);

	@Dependent("OpenGL30")
	@DeprecatedGL
	void glVertexArrayColorOffsetEXT(@GLuint int vaobj, @GLuint int buffer, int size, @GLenum int type, @GLsizei int stride, @GLintptr long offset);

	@Dependent("OpenGL30")
	@DeprecatedGL
	void glVertexArrayEdgeFlagOffsetEXT(@GLuint int vaobj, @GLuint int buffer, @GLsizei int stride, @GLintptr long offset);

	@Dependent("OpenGL30")
	void glVertexArrayIndexOffsetEXT(@GLuint int vaobj, @GLuint int buffer, @GLenum int type, @GLsizei int stride, @GLintptr long offset);

	@Dependent("OpenGL30")
	@DeprecatedGL
	void glVertexArrayNormalOffsetEXT(@GLuint int vaobj, @GLuint int buffer, @GLenum int type, @GLsizei int stride, @GLintptr long offset);

	@Dependent("OpenGL30")
	@DeprecatedGL
	void glVertexArrayTexCoordOffsetEXT(@GLuint int vaobj, @GLuint int buffer, int size, @GLenum int type, @GLsizei int stride, @GLintptr long offset);

	@Dependent("OpenGL30")
	@DeprecatedGL
	void glVertexArrayMultiTexCoordOffsetEXT(@GLuint int vaobj, @GLuint int buffer, @GLenum int texunit, int size, @GLenum int type, @GLsizei int stride, @GLintptr long offset);

	@Dependent("OpenGL30")
	@DeprecatedGL
	void glVertexArrayFogCoordOffsetEXT(@GLuint int vaobj, @GLuint int buffer, @GLenum int type, @GLsizei int stride, @GLintptr long offset);

	@Dependent("OpenGL30")
	@DeprecatedGL
	void glVertexArraySecondaryColorOffsetEXT(@GLuint int vaobj, @GLuint int buffer, int size, @GLenum int type, @GLsizei int stride, @GLintptr long offset);

	@Dependent("OpenGL30")
	void glVertexArrayVertexAttribOffsetEXT(@GLuint int vaobj, @GLuint int buffer, @GLuint int index, int size, @GLenum int type, boolean normalized, @GLsizei int stride, @GLintptr long offset);

	@Dependent("OpenGL30")
	void glVertexArrayVertexAttribIOffsetEXT(@GLuint int vaobj, @GLuint int buffer, @GLuint int index, int size, @GLenum int type, @GLsizei int stride, @GLintptr long offset);

	/* OpenGL 3.0: New vertex array enable commands for vertex array objects change "ClientState" to "VertexArray" and add an initial "uint vaobj" parameter */

	@Dependent("OpenGL30")
	void glEnableVertexArrayEXT(@GLuint int vaobj, @GLenum int array);

	@Dependent("OpenGL30")
	void glDisableVertexArrayEXT(@GLuint int vaobj, @GLenum int array);

	/* OpenGL 3.0: New vertex attrib array enable commands for vertex array objects change "VertexAttribArray" to "VertexArrayAttrib" and add an initial "uint vaobj" parameter */

	@Dependent("OpenGL30")
	void glEnableVertexArrayAttribEXT(@GLuint int vaobj, @GLuint int index);

	@Dependent("OpenGL30")
	void glDisableVertexArrayAttribEXT(@GLuint int vaobj, @GLuint int index);

	/* OpenGL 3.0: New queries for vertex array objects */

	@Dependent("OpenGL30")
	@StripPostfix("param")
	void glGetVertexArrayIntegervEXT(@GLuint int vaobj, @GLenum int pname, @OutParameter @Check("16") IntBuffer param);

	@Alternate("glGetVertexArrayIntegervEXT")
	@GLreturn("param")
	@Dependent("OpenGL30")
	@StripPostfix("param")
	void glGetVertexArrayIntegervEXT2(@GLuint int vaobj, @GLenum int pname, @OutParameter IntBuffer param);

	@Dependent("OpenGL30")
	@StripPostfix("param")
	void glGetVertexArrayPointervEXT(@GLuint int vaobj, @GLenum int pname, @Result @GLvoid ByteBuffer param);

	@Dependent("OpenGL30")
	@StripPostfix(value = "param")
	void glGetVertexArrayIntegeri_vEXT(@GLuint int vaobj, @GLuint int index, @GLenum int pname, @OutParameter @Check("16") IntBuffer param);

	@Alternate("glGetVertexArrayIntegeri_vEXT")
	@GLreturn("param")
	@Dependent("OpenGL30")
	@StripPostfix(value = "param", postfix = "_v")
	void glGetVertexArrayIntegeri_vEXT2(@GLuint int vaobj, @GLuint int index, @GLenum int pname, @OutParameter IntBuffer param);

	@Dependent("OpenGL30")
	@StripPostfix(value = "param", postfix = "i_v")
	void glGetVertexArrayPointeri_vEXT(@GLuint int vaobj, @GLuint int index, @GLenum int pname, @Result @GLvoid ByteBuffer param);

	/* OpenGL 3.0: New buffer commands replace "Buffer" with "NamedBuffer" in name and replace "enum target" parameter with "uint buffer" */

	/**
	 * glMapNamedBufferRangeEXT maps a GL buffer object range to a ByteBuffer. The old_buffer argument can be null,
	 * in which case a new ByteBuffer will be created, pointing to the returned memory. If old_buffer is non-null,
	 * it will be returned if it points to the same mapped memory and has the same capacity as the buffer object,
	 * otherwise a new ByteBuffer is created. That way, an application will normally use glMapNamedBufferRangeEXT like this:
	 * <p/>
	 * ByteBuffer mapped_buffer; mapped_buffer = glMapNamedBufferRangeEXT(..., ..., ..., ..., null); ... // Another map on the same buffer mapped_buffer = glMapNamedBufferRangeEXT(..., ..., ..., ..., mapped_buffer);
	 * <p/>
	 * Only ByteBuffers returned from this method are to be passed as the old_buffer argument. User-created ByteBuffers cannot be reused.
	 *
	 * @param old_buffer A ByteBuffer. If this argument points to the same address and has the same capacity as the new mapping, it will be returned and no new buffer will be created.
	 *
	 * @return A ByteBuffer representing the mapped buffer memory.
	 */
	@Dependent("OpenGL30")
	@CachedResult(isRange = true)
	@GLvoid
	@AutoSize("length")
	ByteBuffer glMapNamedBufferRangeEXT(@GLuint int buffer, @GLintptr long offset, @GLsizeiptr long length, @GLbitfield int access);

	@Dependent("OpenGL30")
	void glFlushMappedNamedBufferRangeEXT(@GLuint int buffer, @GLintptr long offset, @GLsizeiptr long length);

}
apache-2.0
jdeppe-pivotal/geode
geode-core/src/main/java/org/apache/geode/internal/cache/entries/VMThinLRURegionEntryHeapUUIDKey.java
7032
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.entries; // DO NOT modify this class. It was generated from LeafRegionEntry.cpp import java.util.UUID; import java.util.concurrent.atomic.AtomicLongFieldUpdater; import org.apache.geode.internal.cache.RegionEntryContext; import org.apache.geode.internal.cache.eviction.EvictionController; import org.apache.geode.internal.cache.eviction.EvictionNode; import org.apache.geode.internal.cache.persistence.DiskRecoveryStore; import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry; /* * macros whose definition changes this class: * * disk: DISK lru: LRU stats: STATS versioned: VERSIONED offheap: OFFHEAP * * One of the following key macros must be defined: * * key object: KEY_OBJECT key int: KEY_INT key long: KEY_LONG key uuid: KEY_UUID key string1: * KEY_STRING1 key string2: KEY_STRING2 */ /** * Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run * ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory). 
*/
public class VMThinLRURegionEntryHeapUUIDKey extends VMThinLRURegionEntryHeap {
  // NOTE(review): generated file — behavioral changes belong in LeafRegionEntry.cpp, not here.
  // --------------------------------------- common fields ----------------------------------------
  // Updates the volatile lastModified field atomically without a per-entry AtomicLong object.
  private static final AtomicLongFieldUpdater<VMThinLRURegionEntryHeapUUIDKey> LAST_MODIFIED_UPDATER =
      AtomicLongFieldUpdater.newUpdater(VMThinLRURegionEntryHeapUUIDKey.class, "lastModified");
  protected int hash;
  private HashEntry<Object, Object> nextEntry;
  @SuppressWarnings("unused")
  private volatile long lastModified;
  private volatile Object value;
  // --------------------------------------- key fields -------------------------------------------
  // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
  // The UUID key is stored inline as its two 64-bit halves; no separate key object is retained
  // (getKeyForSizing() returns null for inline keys — see below in this class).
  private final long keyMostSigBits;
  private final long keyLeastSigBits;
  public VMThinLRURegionEntryHeapUUIDKey(final RegionEntryContext context, final UUID key,
      final Object value) {
    super(context, value);
    // DO NOT modify this class. It was generated from LeafRegionEntry.cpp
    keyMostSigBits = key.getMostSignificantBits();
    keyLeastSigBits = key.getLeastSignificantBits();
  }
  // DO NOT modify this class.
It was generated from LeafRegionEntry.cpp @Override protected Object getValueField() { return value; } @Override protected void setValueField(final Object value) { this.value = value; } @Override protected long getLastModifiedField() { return LAST_MODIFIED_UPDATER.get(this); } @Override protected boolean compareAndSetLastModifiedField(final long expectedValue, final long newValue) { return LAST_MODIFIED_UPDATER.compareAndSet(this, expectedValue, newValue); } @Override public int getEntryHash() { return hash; } @Override protected void setEntryHash(final int hash) { this.hash = hash; } @Override public HashEntry<Object, Object> getNextEntry() { return nextEntry; } @Override public void setNextEntry(final HashEntry<Object, Object> nextEntry) { this.nextEntry = nextEntry; } // --------------------------------------- eviction code ---------------------------------------- // DO NOT modify this class. It was generated from LeafRegionEntry.cpp @Override public void setDelayedDiskId(final DiskRecoveryStore diskRecoveryStore) { // nothing needed for LRUs with no disk } @Override public synchronized int updateEntrySize(final EvictionController evictionController) { // OFFHEAP: getValue ok w/o incing refcount because we are synced and only getting the size return updateEntrySize(evictionController, getValue()); } // DO NOT modify this class. 
It was generated from LeafRegionEntry.cpp @Override public synchronized int updateEntrySize(final EvictionController evictionController, final Object value) { int oldSize = getEntrySize(); int newSize = evictionController.entrySize(getKeyForSizing(), value); setEntrySize(newSize); int delta = newSize - oldSize; return delta; } @Override public boolean isRecentlyUsed() { return areAnyBitsSet(RECENTLY_USED); } @Override public void setRecentlyUsed(RegionEntryContext context) { if (!isRecentlyUsed()) { setBits(RECENTLY_USED); context.incRecentlyUsed(); } } @Override public void unsetRecentlyUsed() { clearBits(~RECENTLY_USED); } @Override public boolean isEvicted() { return areAnyBitsSet(EVICTED); } @Override public void setEvicted() { setBits(EVICTED); } @Override public void unsetEvicted() { clearBits(~EVICTED); } // DO NOT modify this class. It was generated from LeafRegionEntry.cpp private EvictionNode nextEvictionNode; private EvictionNode previousEvictionNode; private int size; @Override public void setNext(final EvictionNode nextEvictionNode) { this.nextEvictionNode = nextEvictionNode; } @Override public EvictionNode next() { return nextEvictionNode; } @Override public void setPrevious(final EvictionNode previousEvictionNode) { this.previousEvictionNode = previousEvictionNode; } @Override public EvictionNode previous() { return previousEvictionNode; } @Override public int getEntrySize() { return size; } protected void setEntrySize(final int size) { this.size = size; } // DO NOT modify this class. It was generated from LeafRegionEntry.cpp @Override public Object getKeyForSizing() { // inline keys always report null for sizing since the size comes from the entry size return null; } // ----------------------------------------- key code ------------------------------------------- // DO NOT modify this class. 
It was generated from LeafRegionEntry.cpp @Override public Object getKey() { return new UUID(keyMostSigBits, keyLeastSigBits); } @Override public boolean isKeyEqual(final Object key) { if (key instanceof UUID) { UUID uuid = (UUID) key; return uuid.getLeastSignificantBits() == keyLeastSigBits && uuid.getMostSignificantBits() == keyMostSigBits; } return false; } // DO NOT modify this class. It was generated from LeafRegionEntry.cpp }
apache-2.0
apache/cocoon
blocks/cocoon-portal/cocoon-portal-portlet-impl/src/main/java/org/apache/cocoon/portal/pluto/om/ServletMapping.java
1575
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cocoon.portal.pluto.om; /** * * @version $Id$ */ public class ServletMapping implements java.io.Serializable { private String id; private String servletName; private String urlPattern; public ServletMapping() { // nothing to do } // additional methods. public String getId() { return id; } public void setId(String id) { this.id = id; } public String getServletName() { return servletName; } public void setServletName(String servletName) { this.servletName = servletName; } public String getUrlPattern() { return urlPattern; } public void setUrlPattern(String urlPattern) { this.urlPattern = urlPattern; } }
apache-2.0
sannies/mp4parser
isoparser/src/main/java/org/mp4parser/boxes/threegpp/ts26244/TitleBox.java
2403
/*
 * Copyright 2008 CoreMedia AG, Hamburg
 *
 * Licensed under the Apache License, Version 2.0 (the License);
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.mp4parser.boxes.threegpp.ts26244;

import org.mp4parser.boxes.iso14496.part12.UserDataBox;
import org.mp4parser.support.AbstractFullBox;
import org.mp4parser.tools.IsoTypeReader;
import org.mp4parser.tools.IsoTypeWriter;
import org.mp4parser.tools.Utf8;

import java.nio.ByteBuffer;

/**
 * <h1>4cc = "{@value #TYPE}"</h1>
 * <pre>
 * Box Type:  'titl'
 * Container: {@link UserDataBox} ('udta')
 * Mandatory: No
 * Quantity:  Zero or one
 * </pre>
 * Holds the human-readable title of the media together with a 3-letter
 * ISO-639 language code, as defined by 3GPP TS 26.244.
 */
public class TitleBox extends AbstractFullBox {
    public static final String TYPE = "titl";

    /** 3-letter ISO-639 language code of the title. */
    private String language;
    /** Title text; written as UTF-8 followed by a NUL terminator. */
    private String title;

    public TitleBox() {
        super(TYPE);
    }

    public String getLanguage() {
        return this.language;
    }

    /**
     * Sets the 3-letter ISO-639 language for this title.
     *
     * @param lang 3-letter ISO-639 code
     */
    public void setLanguage(String lang) {
        this.language = lang;
    }

    public String getTitle() {
        return this.title;
    }

    public void setTitle(String value) {
        this.title = value;
    }

    protected long getContentSize() {
        // 4 bytes version/flags + 2 bytes packed language code + 1 trailing NUL byte
        long fixedOverhead = 7;
        return fixedOverhead + Utf8.utf8StringLengthInBytes(title);
    }

    protected void getContent(ByteBuffer byteBuffer) {
        writeVersionAndFlags(byteBuffer);
        IsoTypeWriter.writeIso639(byteBuffer, language);
        byteBuffer.put(Utf8.convert(title));
        byteBuffer.put((byte) 0); // NUL terminator after the UTF-8 title
    }

    @Override
    public void _parseDetails(ByteBuffer content) {
        parseVersionAndFlags(content);
        this.language = IsoTypeReader.readIso639(content);
        this.title = IsoTypeReader.readString(content);
    }

    public String toString() {
        StringBuilder text = new StringBuilder("TitleBox[language=");
        text.append(getLanguage());
        text.append(";title=");
        text.append(getTitle());
        text.append("]");
        return text.toString();
    }
}
apache-2.0
SmarterApp/ItemAuthoring
sbac-iaip/java/src/main/java/com/pacificmetrics/saaif/item1/ObjectMenuIconsType.java
3415
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.10.22 at 03:51:28 PM IST
//

package com.pacificmetrics.saaif.item1;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import org.w3c.dom.Element;


/**
 * Define the ObjectMenuIcons type.
 * Icon images to appear on the palette.
 *
 * <p>Java class for ObjectMenuIconsType complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="ObjectMenuIconsType">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="IconSpec" type="{http://www.smarterapp.org/xsd/saaif/v1p0/assessmentitem_v1p0.xsd}IconSpecType" maxOccurs="unbounded"/>
 *         &lt;any processContents='lax' namespace='##other' maxOccurs="unbounded" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 * NOTE(review): generated JAXB binding class — regenerate from the schema
 * rather than editing by hand (edits are lost on recompilation, see header).
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ObjectMenuIconsType", propOrder = {
    "iconSpec",
    "any"
})
public class ObjectMenuIconsType {

    // Lazily initialized in getIconSpec(); JAXB populates this live list during unmarshalling.
    @XmlElement(name = "IconSpec", required = true)
    protected List<IconSpecType> iconSpec;
    // Catch-all for lax ##other-namespace content; lazily initialized in getAny().
    @XmlAnyElement(lax = true)
    protected List<Object> any;

    /**
     * Gets the value of the iconSpec property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the iconSpec property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getIconSpec().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link IconSpecType }
     */
    public List<IconSpecType> getIconSpec() {
        // Lazy creation keeps JAXB's unmarshaller and callers sharing the same list instance.
        if (iconSpec == null) {
            iconSpec = new ArrayList<IconSpecType>();
        }
        return this.iconSpec;
    }

    /**
     * Gets the value of the any property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the any property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getAny().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Object }
     * {@link Element }
     */
    public List<Object> getAny() {
        // Lazy creation, same live-list contract as getIconSpec().
        if (any == null) {
            any = new ArrayList<Object>();
        }
        return this.any;
    }

}
apache-2.0
Network-of-BioThings/GettinCRAFTy
src/main/resources/gate/plugins/Alignment/src/gate/alignment/gui/AlignmentAction.java
3873
package gate.alignment.gui;

import java.util.Set;

import gate.Annotation;
// NOTE(review): Document, Alignment and CompoundDocument below are unused in
// this interface — kept pending a cleanup pass since only part of the file
// history is visible here.
import gate.Document;
import gate.alignment.Alignment;
import gate.alignment.AlignmentActionInitializationException;
import gate.alignment.AlignmentException;
import gate.compound.CompoundDocument;

import javax.swing.Icon;

/**
 * This interface represents an alignment action. AlignAction, ResetAction and
 * RemoveAlignmentAction are examples of AlignmentAction implementations. In
 * other words, it represents an action that can take place after a sentence
 * pair is displayed and before the user shifts focus to a new sentence pair.
 * If the implementation is not one of the three above, it can be executed
 * together with either the AlignAction or the RemoveAlignmentAction.
 *
 * @author niraj
 */
public interface AlignmentAction {

  /**
   * This method is invoked whenever the user clicks the "Align" button in the
   * alignment editor GUI.
   *
   * @param alignmentView the view the action was triggered from
   * @param task the current alignment task
   * @param srcAlignedAnnotations -
   *          annotations from the source document that are being aligned.
   * @param tgtAlignedAnnotations -
   *          annotations from the target document that are being aligned.
   * @param clickedAnnotation -
   *          the last annotation on which the user right-clicked to invoke
   *          and execute this action.
   * @throws AlignmentException if the action cannot be carried out
   */
  public void executeAlignmentAction(AlignmentView alignmentView,
          AlignmentTask task, Set<Annotation> srcAlignedAnnotations,
          Set<Annotation> tgtAlignedAnnotations, Annotation clickedAnnotation)
          throws AlignmentException;

  /**
   * Keep this null in order for the action to be invoked along with the
   * default align action. Otherwise it is shown with this title under the
   * options tab in the alignment editor.
   *
   * @return the caption text, or null (see above)
   */
  public String getCaption();

  /**
   * Keep this null in order for the action to be invoked along with the
   * default align action. Otherwise it is shown with this icon under the
   * options tab in the alignment editor.
   *
   * @return the icon, or null (see above)
   */
  public Icon getIcon();

  /**
   * The icon's absolute path on the filesystem.
   *
   * @return the filesystem path of the icon
   */
  public String getIconPath();

  /**
   * Indicates whether this action should be displayed when the user
   * right-clicks on an aligned annotation.
   *
   * @return true to show the action in that context
   */
  public boolean invokeForAlignedAnnotation();

  /**
   * Indicates whether this action should be displayed when the user
   * right-clicks on an annotation that is highlighted but is not aligned.
   *
   * @return true to show the action in that context
   */
  public boolean invokeForHighlightedUnalignedAnnotation();

  /**
   * Indicates whether this action should be displayed when the user
   * right-clicks on an annotation that is neither highlighted nor aligned.
   *
   * @return true to show the action in that context
   */
  public boolean invokeForUnhighlightedUnalignedAnnotation();

  /**
   * This method should be used for initializing any resources required by the
   * execute() method. It is called when the action is loaded for the first
   * time.
   *
   * @param args implementation-specific initialization arguments
   * @throws AlignmentActionInitializationException if initialization fails
   */
  public void init(String[] args) throws AlignmentActionInitializationException;

  /**
   * This method should free up memory by releasing any resources held by this
   * action. It is called just before the alignment editor is closed.
   */
  public void cleanup();

  /**
   * Indicates whether this action should be called along with the default
   * align action.
   *
   * @return true to run together with the align action
   */
  public boolean invokeWithAlignAction();

  /**
   * Indicates whether this action should be called along with the unalign
   * (remove alignment) action.
   *
   * @return true to run together with the remove action
   */
  public boolean invokeWithRemoveAction();

  /**
   * A tooltip to show whenever the user hovers the mouse over the
   * caption/icon of this action in the alignment editor.
   *
   * @return the tooltip text
   */
  public String getToolTip();
}
apache-2.0
apache/derby
java/org.apache.derby.engine/org/apache/derby/impl/sql/compile/TernaryOperatorNode.java
32119
/* Derby - Class org.apache.derby.impl.sql.compile.TernaryOperatorNode Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.sql.compile; import java.lang.reflect.Modifier; import java.sql.Types; import java.util.List; import org.apache.derby.shared.common.error.StandardException; import org.apache.derby.shared.common.reference.ClassName; import org.apache.derby.shared.common.reference.SQLState; import org.apache.derby.iapi.services.classfile.VMOpcode; import org.apache.derby.iapi.services.compiler.LocalField; import org.apache.derby.iapi.services.compiler.MethodBuilder; import org.apache.derby.iapi.services.context.ContextManager; import org.apache.derby.iapi.services.io.StoredFormatIds; import org.apache.derby.shared.common.sanity.SanityManager; import org.apache.derby.iapi.sql.compile.TypeCompiler; import org.apache.derby.iapi.sql.compile.Visitor; import org.apache.derby.iapi.types.DataTypeDescriptor; import org.apache.derby.iapi.types.TypeId; import org.apache.derby.iapi.util.JBitSet; /** * A TernaryOperatorNode represents a built-in ternary operators. * This covers built-in functions like {@code substr()}. * Java operators are not represented here: the JSQL language allows Java * methods to be called from expressions, but not Java operators. 
* */ class TernaryOperatorNode extends OperatorNode { // Allowed kinds final static int K_TRIM = 0; final static int K_LOCATE = 1; final static int K_SUBSTRING = 2; final static int K_LIKE = 3; final static int K_TIMESTAMPADD = 4; final static int K_TIMESTAMPDIFF = 5; /** * This class is used to hold logically different objects for * space efficiency. {@code kind} represents the logical object * type. See also {@link ValueNode#isSameNodeKind}. */ final int kind; String operator; String methodName; ValueNode receiver; ValueNode leftOperand; ValueNode rightOperand; String resultInterfaceType; String receiverInterfaceType; String leftInterfaceType; String rightInterfaceType; int trimType; static final String[] TernaryOperators = {"trim", "LOCATE", "substring", "like", "TIMESTAMPADD", "TIMESTAMPDIFF"}; static final String[] TernaryMethodNames = {"ansiTrim", "locate", "substring", "like", "timestampAdd", "timestampDiff"}; static final String[] TernaryResultType = {ClassName.StringDataValue, ClassName.NumberDataValue, ClassName.ConcatableDataValue, ClassName.BooleanDataValue, ClassName.DateTimeDataValue, ClassName.NumberDataValue}; static final String[][] TernaryArgType = { {ClassName.StringDataValue, ClassName.StringDataValue, "java.lang.Integer"}, {ClassName.StringDataValue, ClassName.StringDataValue, ClassName.NumberDataValue}, {ClassName.ConcatableDataValue, ClassName.NumberDataValue, ClassName.NumberDataValue}, {ClassName.DataValueDescriptor, ClassName.DataValueDescriptor, ClassName.DataValueDescriptor}, {ClassName.DateTimeDataValue, "java.lang.Integer", ClassName.NumberDataValue}, // time.timestampadd( interval, count) {ClassName.DateTimeDataValue, "java.lang.Integer", ClassName.DateTimeDataValue}// time2.timestampDiff( interval, time1) }; /** * Constructor for a TernaryOperatorNode * * @param receiver The receiver (e.g., string being operated on in * {@code substr()}) * @param leftOperand The left operand of the node * @param rightOperand The right operand of the 
node * @param kind The kind of the operand * @param cm The context manager */ TernaryOperatorNode( ValueNode receiver, ValueNode leftOperand, ValueNode rightOperand, int kind, ContextManager cm) { super(cm); this.kind = kind; constructorMinion( receiver, leftOperand, rightOperand, -1); } /** * Constructor for a TernaryOperatorNode * * @param receiver The receiver (e.g., string being operated on in * {@code substr()}) * @param leftOperand The left operand of the node * @param rightOperand The right operand of the node * @param kind The kind of the operand * @param trimType The trim type * @param cm The context manager */ TernaryOperatorNode(ValueNode receiver, ValueNode leftOperand, ValueNode rightOperand, int kind, int trimType, ContextManager cm) { super(cm); this.kind = kind; constructorMinion( receiver, leftOperand, rightOperand, trimType); } private void constructorMinion(ValueNode receiver, ValueNode leftOperand, ValueNode rightOperand, int trimType) { this.receiver = receiver; this.leftOperand = leftOperand; this.rightOperand = rightOperand; this.operator = TernaryOperators[this.kind]; this.methodName = TernaryMethodNames[this.kind]; this.resultInterfaceType = TernaryResultType[this.kind]; this.receiverInterfaceType = TernaryArgType[this.kind][0]; this.leftInterfaceType = TernaryArgType[this.kind][1]; this.rightInterfaceType = TernaryArgType[this.kind][2]; if (trimType != -1) { this.trimType = trimType; } } /** * Convert this object to a String. See comments in QueryTreeNode.java * for how this should be done for tree printing. 
* * @return This object as a String */ @Override public String toString() { if (SanityManager.DEBUG) { return "operator: " + operator + "\n" + "methodName: " + methodName + "\n" + "resultInterfaceType: " + resultInterfaceType + "\n" + "receiverInterfaceType: " + receiverInterfaceType + "\n" + "leftInterfaceType: " + leftInterfaceType + "\n" + "rightInterfaceType: " + rightInterfaceType + "\n" + super.toString(); } else { return ""; } } /** * Prints the sub-nodes of this object. See QueryTreeNode.java for * how tree printing is supposed to work. * * @param depth The depth of this node in the tree */ @Override void printSubNodes(int depth) { if (SanityManager.DEBUG) { super.printSubNodes(depth); if (receiver != null) { printLabel(depth, "receiver: "); receiver.treePrint(depth + 1); } if (leftOperand != null) { printLabel(depth, "leftOperand: "); leftOperand.treePrint(depth + 1); } if (rightOperand != null) { printLabel(depth, "rightOperand: "); rightOperand.treePrint(depth + 1); } } } /** * Bind this expression. This means binding the sub-expressions, * as well as figuring out what the return type is for this expression. * * @param fromList The FROM list for the query this * expression is in, for binding columns. * @param subqueryList The subquery list being built as we find SubqueryNodes * @param aggregates The aggregate list being built as we find AggregateNodes * * @return The new top of the expression tree. 
* * @exception StandardException Thrown on error */ @Override ValueNode bindExpression(FromList fromList, SubqueryList subqueryList, List<AggregateNode> aggregates) throws StandardException { receiver = receiver.bindExpression(fromList, subqueryList, aggregates); leftOperand = leftOperand.bindExpression(fromList, subqueryList, aggregates); if (rightOperand != null) { rightOperand = rightOperand.bindExpression(fromList, subqueryList, aggregates); } if (kind == K_TRIM) { trimBind(); } else if (kind == K_LOCATE) { locateBind(); } else if (kind == K_SUBSTRING) { substrBind(); } else if (kind == K_TIMESTAMPADD) { timestampAddBind(); } else if (kind == K_TIMESTAMPDIFF) { timestampDiffBind(); } return this; } /** * Preprocess an expression tree. We do a number of transformations * here (including subqueries, IN lists, LIKE and BETWEEN) plus * subquery flattening. * NOTE: This is done before the outer ResultSetNode is preprocessed. * * @param numTables Number of tables in the DML Statement * @param outerFromList FromList from outer query block * @param outerSubqueryList SubqueryList from outer query block * @param outerPredicateList PredicateList from outer query block * * @return The modified expression * * @exception StandardException Thrown on error */ @Override ValueNode preprocess(int numTables, FromList outerFromList, SubqueryList outerSubqueryList, PredicateList outerPredicateList) throws StandardException { receiver = receiver.preprocess(numTables, outerFromList, outerSubqueryList, outerPredicateList); leftOperand = leftOperand.preprocess(numTables, outerFromList, outerSubqueryList, outerPredicateList); if (rightOperand != null) { rightOperand = rightOperand.preprocess(numTables, outerFromList, outerSubqueryList, outerPredicateList); } return this; } /** * Do code generation for this ternary operator. 
* * @param acb The ExpressionClassBuilder for the class we're generating * @param mb The method the expression will go into * * * @exception StandardException Thrown on error */ @Override void generateExpression(ExpressionClassBuilder acb, MethodBuilder mb) throws StandardException { int nargs = 0; String receiverType = null; /* Allocate an object for re-use to hold the result of the operator */ LocalField field = acb.newFieldDeclaration(Modifier.PRIVATE, resultInterfaceType); receiver.generateExpression(acb, mb); if (kind == K_TRIM) { mb.push(trimType); leftOperand.generateExpression(acb, mb); mb.cast(leftInterfaceType); mb.getField(field); nargs = 3; receiverType = receiverInterfaceType; } else if (kind == K_LOCATE) { leftOperand.generateExpression(acb, mb); mb.upCast(leftInterfaceType); rightOperand.generateExpression(acb, mb); mb.upCast(rightInterfaceType); mb.getField(field); nargs = 3; } else if (kind == K_SUBSTRING) { leftOperand.generateExpression(acb, mb); mb.upCast(leftInterfaceType); if (rightOperand != null) { rightOperand.generateExpression(acb, mb); mb.upCast(rightInterfaceType); } else { mb.pushNull(rightInterfaceType); } mb.getField(field); // third arg mb.push(receiver.getTypeServices().getMaximumWidth()); nargs = 4; receiverType = receiverInterfaceType; } else if (kind == K_TIMESTAMPADD || kind == K_TIMESTAMPDIFF) { Object intervalType = leftOperand.getConstantValueAsObject(); if( SanityManager.DEBUG) SanityManager.ASSERT( intervalType != null && intervalType instanceof Integer, "Invalid interval type used for " + operator); mb.push( ((Integer) intervalType).intValue()); rightOperand.generateExpression( acb, mb); mb.upCast(TernaryArgType[kind][2]); acb.getCurrentDateExpression( mb); mb.getField(field); nargs = 4; receiverType = receiverInterfaceType; } mb.callMethod(VMOpcode.INVOKEINTERFACE, receiverType, methodName, resultInterfaceType, nargs); /* ** Store the result of the method call in the field, so we can re-use ** the object. 
*/ mb.putField(field); } /** * Set the leftOperand to the specified ValueNode * * @param newLeftOperand The new leftOperand */ void setLeftOperand(ValueNode newLeftOperand) { leftOperand = newLeftOperand; } /** * Get the leftOperand * * @return The current leftOperand. */ ValueNode getLeftOperand() { return leftOperand; } /** * Set the rightOperand to the specified ValueNode * * @param newRightOperand The new rightOperand */ void setRightOperand(ValueNode newRightOperand) { rightOperand = newRightOperand; } /** * Get the rightOperand * * @return The current rightOperand. */ ValueNode getRightOperand() { return rightOperand; } /** * Categorize this predicate. Initially, this means * building a bit map of the referenced tables for each predicate. * If the source of this ColumnReference (at the next underlying level) * is not a ColumnReference or a VirtualColumnNode then this predicate * will not be pushed down. * * For example, in: * select * from (select 1 from s) a (x) where x = 1 * we will not push down x = 1. * NOTE: It would be easy to handle the case of a constant, but if the * inner SELECT returns an arbitrary expression, then we would have to copy * that tree into the pushed predicate, and that tree could contain * subqueries and method calls. * RESOLVE - revisit this issue once we have views. * * @param referencedTabs JBitSet with bit map of referenced FromTables * @param simplePredsOnly Whether or not to consider method * calls, field references and conditional nodes * when building bit map * * @return boolean Whether or not source.expression is a ColumnReference * or a VirtualColumnNode. 
* @exception StandardException Thrown on error */ @Override boolean categorize(JBitSet referencedTabs, boolean simplePredsOnly) throws StandardException { boolean pushable; pushable = receiver.categorize(referencedTabs, simplePredsOnly); pushable = (leftOperand.categorize(referencedTabs, simplePredsOnly) && pushable); if (rightOperand != null) { pushable = (rightOperand.categorize(referencedTabs, simplePredsOnly) && pushable); } return pushable; } /** * Remap all ColumnReferences in this tree to be clones of the * underlying expression. * * @return ValueNode The remapped expression tree. * * @exception StandardException Thrown on error */ @Override ValueNode remapColumnReferencesToExpressions() throws StandardException { receiver = receiver.remapColumnReferencesToExpressions(); leftOperand = leftOperand.remapColumnReferencesToExpressions(); if (rightOperand != null) { rightOperand = rightOperand.remapColumnReferencesToExpressions(); } return this; } /** * Return whether or not this expression tree represents a constant expression. * * @return Whether or not this expression tree represents a constant expression. */ @Override boolean isConstantExpression() { return (receiver.isConstantExpression() && leftOperand.isConstantExpression() && (rightOperand == null || rightOperand.isConstantExpression())); } /** @see ValueNode#constantExpression */ @Override boolean constantExpression(PredicateList whereClause) { return (receiver.constantExpression(whereClause) && leftOperand.constantExpression(whereClause) && (rightOperand == null || rightOperand.constantExpression(whereClause))); } /** * Accept the visitor for all visitable children of this node. 
* * @param v the visitor * * @exception StandardException on error */ @Override void acceptChildren(Visitor v) throws StandardException { super.acceptChildren(v); if (receiver != null) { receiver = (ValueNode)receiver.accept(v); } if (leftOperand != null) { leftOperand = (ValueNode)leftOperand.accept(v); } if (rightOperand != null) { rightOperand = (ValueNode)rightOperand.accept(v); } } /** * Bind trim expression. * The variable receiver is the string that needs to be trimmed. * The variable leftOperand is the character that needs to be trimmed from * receiver. * * @return The new top of the expression tree. * * @exception StandardException Thrown on error */ private ValueNode trimBind() throws StandardException { TypeId receiverType; TypeId resultType = TypeId.getBuiltInTypeId(Types.VARCHAR); // handle parameters here /* Is there a ? parameter for the receiver? */ if (receiver.requiresTypeFromContext()) { /* ** According to the SQL standard, if trim has a ? receiver, ** its type is varchar with the implementation-defined maximum length ** for a varchar. */ receiver.setType(getVarcharDescriptor()); //check if this parameter can pick up it's collation from the //character that will be used for trimming. If not(meaning the //character to be trimmed is also a parameter), then it will take //it's collation from the compilation schema. if (!leftOperand.requiresTypeFromContext()) { receiver.setCollationInfo(leftOperand.getTypeServices()); } else { receiver.setCollationUsingCompilationSchema(); } } /* Is there a ? parameter on the left? */ if (leftOperand.requiresTypeFromContext()) { /* Set the left operand type to varchar. */ leftOperand.setType(getVarcharDescriptor()); //collation of ? operand should be picked up from the context. //By the time we come here, receiver will have correct collation //set on it and hence we can rely on it to get correct collation //for the ? for the character that needs to be used for trimming. 
leftOperand.setCollationInfo(receiver.getTypeServices()); } bindToBuiltIn(); /* ** Check the type of the receiver - this function is allowed only on ** string value types. */ receiverType = receiver.getTypeId(); if (receiverType.userType()) throwBadType("trim", receiverType.getSQLTypeName()); receiver = castArgToString(receiver); if (receiverType.getTypeFormatId() == StoredFormatIds.CLOB_TYPE_ID) { // special case for CLOBs: if we start with a CLOB, we have to get // a CLOB as a result (as opposed to a VARCHAR), because we can have a // CLOB that is beyond the max length of VARCHAR (ex. "clob(100k)"). // This is okay because CLOBs, like VARCHARs, allow variable-length // values (which is a must for the trim to actually work). resultType = receiverType; } /* ** Check the type of the leftOperand (trimSet). ** The leftOperand should be a string value type. */ TypeId leftCTI; leftCTI = leftOperand.getTypeId(); if (leftCTI.userType()) throwBadType("trim", leftCTI.getSQLTypeName()); leftOperand = castArgToString(leftOperand); /* ** The result type of trim is varchar. */ setResultType(resultType); //Result of TRIM should pick up the collation of the character string //that is getting trimmed (which is variable receiver) because it has //correct collation set on it. setCollationInfo(receiver.getTypeServices()); return this; } /* ** set result type for operator */ private void setResultType(TypeId resultType) throws StandardException { setType(new DataTypeDescriptor( resultType, true, receiver.getTypeServices().getMaximumWidth() ) ); } /** * Bind locate operator * The variable receiver is the string which will searched * The variable leftOperand is the search character that will looked in the * receiver variable. * * @return The new top of the expression tree. * * @exception StandardException Thrown on error */ ValueNode locateBind() throws StandardException { TypeId firstOperandType, secondOperandType, offsetType; /* * Is there a ? parameter for the first arg. 
Copy the * left/firstOperand's. If the left/firstOperand are both parameters, * both will be max length. */ if( receiver.requiresTypeFromContext()) { if( leftOperand.requiresTypeFromContext()) { receiver.setType(getVarcharDescriptor()); //Since both receiver and leftOperands are parameters, use the //collation of compilation schema for receiver. receiver.setCollationUsingCompilationSchema(); } else { if( leftOperand.getTypeId().isStringTypeId() ) { //Since the leftOperand is not a parameter, receiver will //get it's collation from leftOperand through following //setType method receiver.setType( leftOperand.getTypeServices()); } } } /* * Is there a ? parameter for the second arg. Copy the receiver's. * If the receiver are both parameters, both will be max length. */ if(leftOperand.requiresTypeFromContext()) { if(receiver.requiresTypeFromContext()) { leftOperand.setType(getVarcharDescriptor()); } else { if( receiver.getTypeId().isStringTypeId() ) { leftOperand.setType( receiver.getTypeServices()); } } //collation of ? operand should be picked up from the context. //By the time we come here, receiver will have correct collation //set on it and hence we can rely on it to get correct collation //for this ? leftOperand.setCollationInfo(receiver.getTypeServices()); } /* * Is there a ? parameter for the third arg. It will be an int. 
*/ if( rightOperand.requiresTypeFromContext()) { rightOperand.setType( new DataTypeDescriptor(TypeId.INTEGER_ID, true)); } bindToBuiltIn(); /* ** Check the type of the operand - this function is allowed only ** for: receiver = CHAR ** firstOperand = CHAR ** secondOperand = INT */ secondOperandType = leftOperand.getTypeId(); offsetType = rightOperand.getTypeId(); firstOperandType = receiver.getTypeId(); if (!firstOperandType.isStringTypeId() || !secondOperandType.isStringTypeId() || offsetType.getJDBCTypeId() != Types.INTEGER) throw StandardException.newException(SQLState.LANG_DB2_FUNCTION_INCOMPATIBLE, "LOCATE", "FUNCTION"); /* ** The result type of a LocateFunctionNode is an integer. */ setType(new DataTypeDescriptor(TypeId.INTEGER_ID, receiver.getTypeServices().isNullable())); return this; } /* cast arg to a varchar */ protected ValueNode castArgToString(ValueNode vn) throws StandardException { TypeCompiler vnTC = vn.getTypeCompiler(); if (! vn.getTypeId().isStringTypeId()) { DataTypeDescriptor dtd = DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR, true, vnTC.getCastToCharWidth( vn.getTypeServices())); ValueNode newNode = new CastNode(vn, dtd, getContextManager()); // DERBY-2910 - Match current schema collation for implicit cast as we do for // explicit casts per SQL Spec 6.12 (10) newNode.setCollationUsingCompilationSchema(); ((CastNode) newNode).bindCastNodeOnly(); return newNode; } return vn; } /** * Bind substr expression. * * @return The new top of the expression tree. * * @exception StandardException Thrown on error */ ValueNode substrBind() throws StandardException { TypeId receiverType; TypeId resultType = TypeId.getBuiltInTypeId(Types.VARCHAR); // handle parameters here /* Is there a ? parameter for the receiver? */ if (receiver.requiresTypeFromContext()) { /* ** According to the SQL standard, if substr has a ? receiver, ** its type is varchar with the implementation-defined maximum length ** for a varchar. 
*/ receiver.setType(getVarcharDescriptor()); //collation of ? operand should be same as the compilation schema //because that is the only context available for us to pick up the //collation. There are no other character operands to SUBSTR method //to pick up the collation from. receiver.setCollationUsingCompilationSchema(); } /* Is there a ? parameter on the left? */ if (leftOperand.requiresTypeFromContext()) { /* Set the left operand type to int. */ leftOperand.setType( new DataTypeDescriptor(TypeId.INTEGER_ID, true)); } /* Is there a ? parameter on the right? */ if ((rightOperand != null) && rightOperand.requiresTypeFromContext()) { /* Set the right operand type to int. */ rightOperand.setType( new DataTypeDescriptor(TypeId.INTEGER_ID, true)); } bindToBuiltIn(); if (!leftOperand.getTypeId().isNumericTypeId() || (rightOperand != null && !rightOperand.getTypeId().isNumericTypeId())) throw StandardException.newException(SQLState.LANG_DB2_FUNCTION_INCOMPATIBLE, "SUBSTR", "FUNCTION"); /* ** Check the type of the receiver - this function is allowed only on ** string value types. */ receiverType = receiver.getTypeId(); switch (receiverType.getJDBCTypeId()) { case Types.CHAR: case Types.VARCHAR: case Types.LONGVARCHAR: case Types.CLOB: break; default: { throwBadType("SUBSTR", receiverType.getSQLTypeName()); } } if (receiverType.getTypeFormatId() == StoredFormatIds.CLOB_TYPE_ID) { // special case for CLOBs: if we start with a CLOB, we have to get // a CLOB as a result (as opposed to a VARCHAR), because we can have a // CLOB that is beyond the max length of VARCHAR (ex. "clob(100k)"). // This is okay because CLOBs, like VARCHARs, allow variable-length // values (which is a must for the substr to actually work). 
resultType = receiverType; } // Determine the maximum length of the result int resultLen = receiver.getTypeServices().getMaximumWidth(); if (rightOperand != null && rightOperand instanceof ConstantNode) { if (((ConstantNode)rightOperand).getValue().getInt() < resultLen) resultLen = ((ConstantNode)rightOperand).getValue().getInt(); } /* ** The result type of substr is a string type */ setType(new DataTypeDescriptor( resultType, true, resultLen )); //Result of SUSBSTR should pick up the collation of the 1st argument //to SUBSTR. The 1st argument to SUBSTR is represented by the variable //receiver in this class. setCollationInfo(receiver.getTypeServices()); return this; } /** * Bind TIMESTAMPADD expression. * * @return The new top of the expression tree. * * @exception StandardException Thrown on error */ private ValueNode timestampAddBind() throws StandardException { if( ! bindParameter( rightOperand, Types.INTEGER)) { int jdbcType = rightOperand.getTypeId().getJDBCTypeId(); if( jdbcType != Types.TINYINT && jdbcType != Types.SMALLINT && jdbcType != Types.INTEGER && jdbcType != Types.BIGINT) throw StandardException.newException(SQLState.LANG_INVALID_FUNCTION_ARG_TYPE, rightOperand.getTypeId().getSQLTypeName(), 2, operator); } bindDateTimeArg( receiver, 3); setType(DataTypeDescriptor.getBuiltInDataTypeDescriptor( Types.TIMESTAMP)); return this; } // end of timestampAddBind /** * Bind TIMESTAMPDIFF expression. * * @return The new top of the expression tree. * * @exception StandardException Thrown on error */ private ValueNode timestampDiffBind() throws StandardException { bindDateTimeArg( rightOperand, 2); bindDateTimeArg( receiver, 3); setType(DataTypeDescriptor.getBuiltInDataTypeDescriptor( Types.BIGINT)); return this; } // End of timestampDiffBind private void bindDateTimeArg( ValueNode arg, int argNumber) throws StandardException { if( ! bindParameter( arg, Types.TIMESTAMP)) { if( ! 
arg.getTypeId().isDateTimeTimeStampTypeId()) throw StandardException.newException(SQLState.LANG_INVALID_FUNCTION_ARG_TYPE, arg.getTypeId().getSQLTypeName(), argNumber, operator); } } // end of bindDateTimeArg /** * This method gets called for non-character string types and hence no need * to set any collation info. Collation applies only to character string * types. * * @param arg Check if arg is a ? param and if yes, then set it's type to * jdbcType if arg doesn't have a type associated with it. * * @param jdbcType Associate this type with arg if arg is a ? param with no * type associated with it * * @return true if arg is a ? param with no type associated with it * @throws StandardException */ private boolean bindParameter( ValueNode arg, int jdbcType) throws StandardException { if( arg.requiresTypeFromContext() && arg.getTypeId() == null) { arg.setType( new DataTypeDescriptor(TypeId.getBuiltInTypeId( jdbcType), true)); return true; } return false; } // end of bindParameter ValueNode getReceiver() { return receiver; } /* throw bad type message */ private void throwBadType(String funcName, String type) throws StandardException { throw StandardException.newException(SQLState.LANG_UNARY_FUNCTION_BAD_TYPE, funcName, type); } /* bind arguments to built in types */ protected void bindToBuiltIn() throws StandardException { /* If the receiver is not a built-in type, then generate a bound conversion * tree to a built-in type. */ if (receiver.getTypeId().userType()) { receiver = receiver.genSQLJavaSQLTree(); } /* If the left operand is not a built-in type, then generate a bound conversion * tree to a built-in type. */ if (leftOperand.getTypeId().userType()) { leftOperand = leftOperand.genSQLJavaSQLTree(); } /* If the right operand is not a built-in type, then generate a bound conversion * tree to a built-in type. 
*/ if (rightOperand != null) { if (rightOperand.getTypeId().userType()) { rightOperand = rightOperand.genSQLJavaSQLTree(); } } } private DataTypeDescriptor getVarcharDescriptor() { return new DataTypeDescriptor(TypeId.getBuiltInTypeId(Types.VARCHAR), true); } boolean isSameNodeKind(ValueNode o) { return super.isSameNodeKind(o) && ((TernaryOperatorNode)o).kind == this.kind; } boolean isEquivalent(ValueNode o) throws StandardException { if (isSameNodeKind(o)) { TernaryOperatorNode other = (TernaryOperatorNode)o; /* * SUBSTR function can either have 2 or 3 arguments. In the * 2-args case, rightOperand will be null and thus needs * additional handling in the equivalence check. */ return (other.methodName.equals(methodName) && other.receiver.isEquivalent(receiver) && other.leftOperand.isEquivalent(leftOperand) && ( (rightOperand == null && other.rightOperand == null) || (other.rightOperand != null && other.rightOperand.isEquivalent(rightOperand)) ) ); } return false; } }
apache-2.0
hazelcast/hazelcast-simulator
simulator/src/main/java/com/hazelcast/simulator/coordinator/tasks/PrepareSessionTask.java
2092
/*
 * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.hazelcast.simulator.coordinator.tasks;

import com.hazelcast.simulator.utils.BashCommand;

import java.io.File;
import java.util.List;
import java.util.Map;

import static com.hazelcast.simulator.utils.FileUtils.getConfigurationFile;
import static com.hazelcast.simulator.utils.FormatUtils.join;

/**
 * Prepares the session on the remote machines.
 *
 * <p>This includes creating the session directory and uploading the 'uploads'
 * directory. The real work is delegated to the {@code prepare_session.sh}
 * script, which receives the upload directory, the session id and the
 * comma-separated agent addresses as parameters.
 */
public class PrepareSessionTask {

    // Addresses of the agent machines the session is prepared on.
    private final List<String> agents;
    // Environment variables handed verbatim to the bash script.
    private final Map<String, String> simulatorProperties;
    // Local directory whose content is uploaded to the remote machines.
    private final File uploadDir;
    // Identifier of the session being prepared.
    private final String sessionId;

    public PrepareSessionTask(List<String> agents,
                              Map<String, String> simulatorProperties,
                              File uploadDir,
                              String sessionId) {
        this.agents = agents;
        this.simulatorProperties = simulatorProperties;
        this.uploadDir = uploadDir;
        this.sessionId = sessionId;
    }

    /**
     * Executes {@code prepare_session.sh} with the configured environment
     * and parameters.
     */
    public void run() {
        String scriptPath = getConfigurationFile("prepare_session.sh").getAbsolutePath();
        String commaSeparatedAgents = join(agents, ",");

        new BashCommand(scriptPath)
                .addEnvironment(simulatorProperties)
                .addParams(uploadDir.getAbsolutePath(), sessionId, commaSeparatedAgents)
                .execute();
    }
}
apache-2.0
googlesamples/android-play-location
Geofencing/app/src/main/java/com/google/android/gms/location/sample/geofencing/Constants.java
1940
/*
 * Copyright 2017 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.gms.location.sample.geofencing;

import com.google.android.gms.maps.model.LatLng;

import java.util.HashMap;

/**
 * Constants used in this sample.
 */
final class Constants {

    /** Non-instantiable: this class only holds constants. */
    private Constants() {
    }

    private static final String PACKAGE_NAME = "com.google.android.gms.location.Geofence";

    static final String GEOFENCES_ADDED_KEY = PACKAGE_NAME + ".GEOFENCES_ADDED_KEY";

    /**
     * Geofence expiration time in hours; after this amount of time Location
     * Services stops tracking the geofence. For this sample that is twelve hours.
     */
    private static final long GEOFENCE_EXPIRATION_IN_HOURS = 12;

    /** The same expiration expressed in milliseconds, as the Geofence API expects. */
    static final long GEOFENCE_EXPIRATION_IN_MILLISECONDS =
            GEOFENCE_EXPIRATION_IN_HOURS * 60 * 60 * 1000;

    static final float GEOFENCE_RADIUS_IN_METERS = 1609; // 1 mile, 1.6 km

    /**
     * Information about landmarks in the San Francisco bay area, keyed by a
     * short landmark code.
     */
    static final HashMap<String, LatLng> BAY_AREA_LANDMARKS = new HashMap<>();

    static {
        HashMap<String, LatLng> landmarks = BAY_AREA_LANDMARKS;
        // San Francisco International Airport.
        landmarks.put("SFO", new LatLng(37.621313, -122.378955));
        // Googleplex.
        landmarks.put("GOOGLE", new LatLng(37.422611, -122.0840577));
    }
}
apache-2.0
apache/incubator-systemml
src/main/java/org/apache/sysds/runtime/matrix/data/LibMatrixCuDNNPoolingDescriptors.java
6675
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysds.runtime.matrix.data; import static jcuda.jcudnn.JCudnn.cudnnCreatePoolingDescriptor; import static jcuda.jcudnn.JCudnn.cudnnCreateTensorDescriptor; import static jcuda.jcudnn.JCudnn.cudnnDestroyTensorDescriptor; import static jcuda.jcudnn.JCudnn.cudnnSetPooling2dDescriptor; import static jcuda.jcudnn.JCudnn.cudnnSetTensor4dDescriptor; import static jcuda.jcudnn.cudnnNanPropagation.CUDNN_PROPAGATE_NAN; import static jcuda.jcudnn.cudnnPoolingMode.CUDNN_POOLING_MAX; import static jcuda.jcudnn.cudnnPoolingMode.CUDNN_POOLING_AVERAGE_COUNT_INCLUDE_PADDING; import static jcuda.jcudnn.cudnnTensorFormat.CUDNN_TENSOR_NCHW; import org.apache.sysds.runtime.instructions.gpu.context.GPUContext; import org.apache.sysds.runtime.matrix.data.LibMatrixDNN.PoolingType; import jcuda.jcudnn.cudnnPoolingDescriptor; import jcuda.jcudnn.cudnnTensorDescriptor; /** * This class is a wrapper that contain necessary data structures to invoke * a cudnn convolution* functions (such as cudnnConvolutionForward, etc) * * It implements autocloseable to simplify the LibMatrixCuDNN code and also avoids potential memory leaks. 
*/ public class LibMatrixCuDNNPoolingDescriptors implements java.lang.AutoCloseable { public cudnnTensorDescriptor xDesc; public cudnnTensorDescriptor yDesc; public cudnnTensorDescriptor dxDesc; public cudnnTensorDescriptor dyDesc; public cudnnPoolingDescriptor poolingDesc; @Override public void close() { if(xDesc != null) cudnnDestroyTensorDescriptor(xDesc); if(yDesc != null) cudnnDestroyTensorDescriptor(yDesc); if(dxDesc != null) cudnnDestroyTensorDescriptor(dxDesc); if(dyDesc != null) cudnnDestroyTensorDescriptor(dyDesc); if(poolingDesc != null) jcuda.jcudnn.JCudnn.cudnnDestroyPoolingDescriptor(poolingDesc); } /** * Get descriptors for maxpooling backward operation * * @param gCtx gpu context * @param instName instruction name * @param N batch size * @param C number of channels * @param H height of image * @param W width of image * @param K number of filters * @param R height of filter * @param S width of filter * @param pad_h vertical padding * @param pad_w horizontal padding * @param stride_h horizontal stride * @param stride_w vertical stride * @param P (H - R + 1 + 2*pad_h)/stride_h * @param Q (W - S + 1 + 2*pad_w)/stride_w * @param poolingType type of pooling * @return decriptor wrapper */ public static LibMatrixCuDNNPoolingDescriptors cudnnPoolingBackwardDescriptors(GPUContext gCtx, String instName, int N, int C, int H, int W, int K, int R, int S, int pad_h, int pad_w, int stride_h, int stride_w, int P, int Q, PoolingType poolingType) { LibMatrixCuDNNPoolingDescriptors ret = new LibMatrixCuDNNPoolingDescriptors(); ret.xDesc = allocateTensorDescriptor(N, C, H, W); ret.yDesc = allocateTensorDescriptor(N, C, P, Q); ret.dxDesc = allocateTensorDescriptor(N, C, H, W); ret.dyDesc = allocateTensorDescriptor(N, C, P, Q); ret.poolingDesc = allocatePoolingDescriptor(R, S, pad_h, pad_w, stride_h, stride_w, poolingType); return ret; } /** * Get descriptors for maxpooling operation * * @param gCtx gpu context * @param instName instruction name * @param N batch size * 
@param C number of channels * @param H height of image * @param W width of image * @param K number of filters * @param R height of filter * @param S width of filter * @param pad_h vertical padding * @param pad_w horizontal padding * @param stride_h horizontal stride * @param stride_w vertical stride * @param P (H - R + 1 + 2*pad_h)/stride_h * @param Q (W - S + 1 + 2*pad_w)/stride_w * @param poolingType type of pooling * @return decriptor wrapper */ public static LibMatrixCuDNNPoolingDescriptors cudnnPoolingDescriptors(GPUContext gCtx, String instName, int N, int C, int H, int W, int K, int R, int S, int pad_h, int pad_w, int stride_h, int stride_w, int P, int Q, PoolingType poolingType) { LibMatrixCuDNNPoolingDescriptors ret = new LibMatrixCuDNNPoolingDescriptors(); ret.xDesc = allocateTensorDescriptor(N, C, H, W); ret.yDesc = allocateTensorDescriptor(N, C, P, Q); ret.poolingDesc = allocatePoolingDescriptor(R, S, pad_h, pad_w, stride_h, stride_w, poolingType); return ret; } /** * Convenience method to get tensor descriptor * @param N number of images * @param C number of channels * @param H height * @param W width * @return cudnn tensor descriptor */ private static cudnnTensorDescriptor allocateTensorDescriptor(int N, int C, int H, int W) { cudnnTensorDescriptor tensorDescriptor = new cudnnTensorDescriptor(); cudnnCreateTensorDescriptor(tensorDescriptor); cudnnSetTensor4dDescriptor(tensorDescriptor, CUDNN_TENSOR_NCHW, LibMatrixCUDA.CUDNN_DATA_TYPE, N, C, H, W); return tensorDescriptor; } /** * allocates pooling descriptor, used in poolingForward and poolingBackward * @param R pooling window height * @param S pooling window width * @param pad_h vertical padding * @param pad_w horizontal padding * @param stride_h pooling vertical stride * @param stride_w pooling horizontal stride * @param poolingType type of pooling * @return cudnn pooling descriptor */ private static cudnnPoolingDescriptor allocatePoolingDescriptor(int R, int S, int pad_h, int pad_w, int stride_h, 
int stride_w, PoolingType poolingType) { cudnnPoolingDescriptor poolingDesc = new cudnnPoolingDescriptor(); cudnnCreatePoolingDescriptor(poolingDesc); int CUDNN_POOLING = (poolingType == PoolingType.MAX) ? CUDNN_POOLING_MAX : CUDNN_POOLING_AVERAGE_COUNT_INCLUDE_PADDING; cudnnSetPooling2dDescriptor(poolingDesc, CUDNN_POOLING, CUDNN_PROPAGATE_NAN, R, S, pad_h, pad_w, stride_h, stride_w); return poolingDesc; } }
apache-2.0
GIP-RECIA/esup-publisher-ui
src/main/java/org/esupportail/publisher/security/IPermissionService.java
3223
/** * Copyright (C) 2014 Esup Portail http://www.esup-portail.org * @Author (C) 2012 Julien Gribonvald <julien.gribonvald@recia.fr> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.esupportail.publisher.security; import javax.validation.constraints.NotNull; import org.esupportail.publisher.domain.ContextKey; import org.esupportail.publisher.domain.enums.ContextType; import org.esupportail.publisher.domain.enums.PermissionType; import org.esupportail.publisher.web.rest.dto.PermissionDTO; import org.springframework.security.core.Authentication; import com.mysema.commons.lang.Pair; import com.querydsl.core.types.Predicate; public interface IPermissionService { PermissionType getRoleOfUserInContext(Authentication authentication, @NotNull final ContextKey contextKey); Pair<PermissionType, PermissionDTO> getPermsOfUserInContext(Authentication authentication, @NotNull final ContextKey contextKey); // Role getRoleOfUserInContext(UserDTO from, // Collection<? 
extends GrantedAuthority> authorities, // ContextKey contextKey); Predicate filterAuthorizedAllOfContextType(Authentication authentication, @NotNull final ContextType contextType, @NotNull final PermissionType permissionType, @NotNull final Predicate predicate); Predicate filterAuthorizedChildsOfContext(Authentication authentication, @NotNull final ContextKey contextKey, @NotNull final PermissionType permissionType, @NotNull final Predicate predicate); boolean canCreateInCtx(Authentication authentication, @NotNull final ContextKey contextKey); boolean canEditCtx(Authentication authentication, @NotNull final ContextKey contextKey); boolean canDeleteCtx(Authentication authentication, @NotNull final ContextKey contextKey); boolean canCreateInCtx(Authentication authentication, @NotNull final long contextId, @NotNull final ContextType contextType); boolean canEditCtx(Authentication authentication, @NotNull final long contextId, @NotNull final ContextType contextType); boolean canDeleteCtx(Authentication authentication, @NotNull final long contextId, @NotNull final ContextType contextType); boolean canEditCtxPerms (Authentication authentication, @NotNull final ContextKey contextKey); boolean canEditCtxTargets (Authentication authentication, @NotNull final ContextKey contextKey); boolean canModerateSomething(Authentication authentication); boolean hasAuthorizedChilds(Authentication authentication, @NotNull final ContextKey contextKey); boolean canHighlightInCtx(Authentication authentication, @NotNull final ContextKey contextKey); }
apache-2.0
rvbaast/migrations
src/main/java/org/apache/ibatis/migration/operations/VersionOperation.java
2898
/**
 * Copyright 2010-2015 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ibatis.migration.operations;

import java.io.PrintStream;
import java.math.BigDecimal;
import java.util.List;

import org.apache.ibatis.migration.Change;
import org.apache.ibatis.migration.ConnectionProvider;
import org.apache.ibatis.migration.MigrationException;
import org.apache.ibatis.migration.MigrationLoader;
import org.apache.ibatis.migration.options.DatabaseOperationOption;

/**
 * Migrates the database schema up or down to exactly the requested version by
 * repeatedly applying single up/down steps until the last applied change
 * matches the target version.
 */
public final class VersionOperation extends DatabaseOperation<VersionOperation> {

  /** Target migration version; never null (validated in the constructor). */
  private final BigDecimal version;

  /**
   * @param version the version to migrate to; must not be null
   * @throws IllegalArgumentException if {@code version} is null
   */
  public VersionOperation(BigDecimal version) {
    if (version == null) {
      // Fixed error message: the original said "must be null" while the
      // check rejects a null argument.
      throw new IllegalArgumentException("The version must not be null.");
    }
    this.version = version;
  }

  /**
   * Runs up or down migrations one step at a time until the database is at
   * {@link #version}. Prints progress to {@code printStream}.
   *
   * @param connectionProvider supplies database connections
   * @param migrationsLoader supplies the known migration scripts
   * @param option operation options; a default instance is used when null
   * @param printStream destination for progress messages
   * @return this operation, for chaining
   * @throws MigrationException if the requested version has no migration script
   */
  @Override
  public VersionOperation operate(ConnectionProvider connectionProvider, MigrationLoader migrationsLoader,
      DatabaseOperationOption option, PrintStream printStream) {
    if (option == null) {
      option = new DatabaseOperationOption();
    }
    ensureVersionExists(migrationsLoader);
    Change change = getLastAppliedChange(connectionProvider, option);
    if (change == null || version.compareTo(change.getId()) > 0) {
      println(printStream, "Upgrading to: " + version);
      UpOperation up = new UpOperation(1);
      // Guard against change being null on the first iteration (no migration
      // applied yet); the original dereferenced change unconditionally and
      // would throw a NullPointerException in that case.
      while (change == null || !version.equals(change.getId())) {
        up.operate(connectionProvider, migrationsLoader, option, printStream);
        change = getLastAppliedChange(connectionProvider, option);
      }
    } else if (version.compareTo(change.getId()) < 0) {
      println(printStream, "Downgrading to: " + version);
      DownOperation down = new DownOperation(1);
      while (!version.equals(change.getId())) {
        down.operate(connectionProvider, migrationsLoader, option, printStream);
        change = getLastAppliedChange(connectionProvider, option);
      }
    } else {
      println(printStream, "Already at version: " + version);
    }
    println(printStream);
    return this;
  }

  /** Fails fast if no migration script carries the requested version number. */
  private void ensureVersionExists(MigrationLoader migrationsLoader) {
    List<Change> migrations = migrationsLoader.getMigrations();
    if (!migrations.contains(new Change(version))) {
      throw new MigrationException("A migration for the specified version number does not exist.");
    }
  }
}
apache-2.0
jonvestal/open-kilda
src-java/base-topology/base-storm-topology/src/main/java/org/openkilda/wfm/topology/utils/JsonKafkaTranslator.java
1036
/* Copyright 2019 Telstra Open Source * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openkilda.wfm.topology.utils; import org.openkilda.wfm.CommandContext; import lombok.extern.slf4j.Slf4j; import org.apache.kafka.clients.consumer.ConsumerRecord; @Slf4j public class JsonKafkaTranslator extends GenericKafkaRecordTranslator<String> { @Override protected CommandContext makeContext(ConsumerRecord<?, ?> record, String payload) { return new CommandContext(record); } }
apache-2.0
BigAppOS/BigApp_Discuz_Android
libs/ZUtils/src/com/kit/app/CrashHandler.java
7785
package com.kit.app;

import android.annotation.SuppressLint;
import android.content.Context;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.os.Build;
import android.os.Environment;
import android.os.Looper;
import android.widget.Toast;

import com.kit.config.AppConfig;
import com.kit.utils.AppUtils;
import com.kit.utils.ZogUtils;
import com.kit.utils.StringUtils;

import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.Thread.UncaughtExceptionHandler;
import java.lang.reflect.Field;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/**
 * UncaughtException handler: when the app throws an uncaught exception, this
 * class takes over, records the crash details to a log file and then closes
 * the app.
 *
 * @author user
 */
@SuppressLint("SdCardPath")
public class CrashHandler implements UncaughtExceptionHandler {
    // public static final String TAG = "CrashHandler";
    // The system-default UncaughtException handler, kept so unhandled cases can
    // be delegated back to it.
    private Thread.UncaughtExceptionHandler mDefaultHandler;
    // The single CrashHandler instance.
    private static CrashHandler INSTANCE = new CrashHandler();
    // Application Context used for Toast, package info and app shutdown.
    private Context mContext;
    // Collected device/package information attached to the crash report.
    private Map<String, String> infos = new HashMap<String, String>();
    private String appName = "";
    // Formats the date used as part of the crash-log file name.
    @SuppressLint("SimpleDateFormat")
    private DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");

    // private IDoSomethingWhenCrash doSomethingWhenCrash;

    /**
     * Intended to be a singleton — use {@link #getInstance()}.
     * NOTE(review): the constructor is public despite the singleton intent, so
     * extra instances can still be created by callers.
     */
    public CrashHandler() {

    }

    /**
     * Returns the CrashHandler singleton.
     */
    public static CrashHandler getInstance() {
        return INSTANCE;
    }

    /**
     * Initializes the handler and records the app name used in log paths.
     *
     * @param context application context
     * @param appName name used for logging and the crash-file subdirectory
     */
    public void init(Context context, String appName) {
        init(context);
        this.appName = appName;
    }

    /**
     * Initializes the handler and installs it as the process-wide default
     * uncaught-exception handler.
     *
     * @param context application context
     */
    public void init(Context context) {
        ZogUtils.printLog(CrashHandler.class, "init CrashHandler");
        mContext = context;
        // Remember the system-default UncaughtException handler.
        mDefaultHandler = Thread.getDefaultUncaughtExceptionHandler();
        // Install this CrashHandler as the process default handler.
        Thread.setDefaultUncaughtExceptionHandler(this);
    }

    /**
     * Invoked when an uncaught exception occurs anywhere in the process.
     * If {@link #handleException(Throwable)} handled it, the app is closed on a
     * background thread; otherwise the original default handler is invoked.
     */
    @Override
    public void uncaughtException(Thread thread, Throwable ex) {
        ZogUtils.printLog(CrashHandler.class, "App " + appName + " crash");
        if (handleException(ex)) {
            new Thread() {
                @Override
                public void run() {
                    // A Looper is required because closeApp may touch UI-bound APIs.
                    Looper.prepare();
                    AppUtils.closeApp(mContext);
                    Looper.loop();
                }
            }.start();
        } else {
            if (mDefaultHandler != null)
                // Not handled by us — let the system default handler process it.
                mDefaultHandler.uncaughtException(thread, ex);
        }
    }

    /**
     * Custom error handling: collects device info and writes the crash report.
     *
     * @param ex the uncaught exception
     * @return true if the exception was handled here; false otherwise.
     */
    private boolean handleException(Throwable ex) {
        if (ex == null) {
            return false;
        }
        // Show the failure to the user via a Toast (needs its own Looper thread).
        new Thread() {
            @Override
            public void run() {
                Looper.prepare();
                Toast.makeText(mContext, "很抱歉,程序出现异常,即将退出.", Toast.LENGTH_LONG)
                        .show();
                Looper.loop();
            }
        }.start();
        // Collect device parameters.
        collectDeviceInfo(mContext);
        // Persist the crash report to a log file.
        saveCrashInfo2File(ex);
        // Hook for subclasses to do extra work.
        doSomething();
        return true;
    }

    /**
     * Collects package version info and all {@link Build} fields into
     * {@link #infos}.
     *
     * @param ctx context used to resolve the package manager
     */
    public void collectDeviceInfo(Context ctx) {
        try {
            PackageManager pm = ctx.getPackageManager();
            PackageInfo pi = pm.getPackageInfo(ctx.getPackageName(),
                    PackageManager.GET_ACTIVITIES);
            if (pi != null) {
                String versionName = pi.versionName == null ? "null" : pi.versionName;
                String versionCode = pi.versionCode + "";
                infos.put("versionName", versionName);
                infos.put("versionCode", versionCode);
            }
        } catch (NameNotFoundException e) {
            ZogUtils.printLog(CrashHandler.class, "an error occured when collect package info");
            ZogUtils.showException(e);
        }
        // Dump every android.os.Build field (device model, brand, etc.) via reflection.
        Field[] fields = Build.class.getDeclaredFields();
        for (Field field : fields) {
            try {
                field.setAccessible(true);
                infos.put(field.getName(), field.get(null).toString());
                // LogUtils.printLog(CrashHandler.class,
                // field.getName() + " : " + field.get(null));
            } catch (Exception e) {
                ZogUtils.printLog(CrashHandler.class, "an error occured when collect crash info" + e);
            }
        }
    }

    /**
     * Writes the collected info plus the full stack trace (including causes)
     * to a log file on external storage.
     *
     * @param ex the exception to record
     * @return the log file name (for upload to a server), or null on failure.
     */
    private String saveCrashInfo2File(Throwable ex) {
        StringBuffer sb = new StringBuffer();
        for (Map.Entry<String, String> entry : infos.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            sb.append(key + "=" + value + "\n");
        }

        Writer writer = new StringWriter();
        PrintWriter printWriter = new PrintWriter(writer);
        ex.printStackTrace(printWriter);
        // Walk the full cause chain so nested exceptions are captured too.
        Throwable cause = ex.getCause();
        while (cause != null) {
            cause.printStackTrace(printWriter);
            cause = cause.getCause();
        }
        printWriter.close();
        String result = writer.toString();
        sb.append(result);
        ZogUtils.printLog(CrashHandler.class, "\n" + sb.toString());
        try {
            long timestamp = System.currentTimeMillis();
            String time = formatter.format(new Date());
            String fileName = time + "-" + timestamp + ".log";
            // Only write when external storage is mounted.
            if (Environment.getExternalStorageState().equals(
                    Environment.MEDIA_MOUNTED)) {
                String path = AppConfig.CACHE_DATA_DIR + "crash/"
                        + (StringUtils.isNullOrEmpty(appName) ? "" : appName + "/");
                File dir = new File(path);
                if (!dir.exists()) {
                    dir.mkdirs();
                }
                FileOutputStream fos = new FileOutputStream(path + fileName);
                fos.write(sb.toString().getBytes());
                fos.close();
            }
            return fileName;
        } catch (Exception e) {
            ZogUtils.printLog(CrashHandler.class, "an error occured while writing file...");
            ZogUtils.showException(e);
        }
        return null;
    }

    /**
     * Subclasses override doSomething to perform additional work after a crash
     * is recorded. Default implementation does nothing.
     */
    public void doSomething() {

    }
}
apache-2.0
porcelli-forks/kie-wb-common
kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-marshalling/src/main/java/org/kie/workbench/common/stunner/bpmn/client/marshall/converters/fromstunner/properties/BoundaryEventPropertyWriter.java
3446
/*
 * Copyright 2019 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.properties;

import java.util.Optional;

import org.eclipse.bpmn2.BoundaryEvent;
import org.eclipse.bpmn2.EventDefinition;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.customproperties.CustomAttribute;
import org.kie.workbench.common.stunner.core.graph.Edge;
import org.kie.workbench.common.stunner.core.graph.Node;
import org.kie.workbench.common.stunner.core.graph.content.Bound;
import org.kie.workbench.common.stunner.core.graph.content.Bounds;
import org.kie.workbench.common.stunner.core.graph.content.view.Point2D;
import org.kie.workbench.common.stunner.core.graph.content.view.View;

import static org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.properties.util.PropertyWriterUtils.getDockSourceNode;

/**
 * Property writer for BPMN {@link BoundaryEvent} elements: catch events that
 * are attached (docked) to an activity. Extends the generic catch-event writer
 * with cancel-activity handling, attachment to the parent activity, and the
 * relative-to-absolute bounds conversion that docked nodes require.
 */
public class BoundaryEventPropertyWriter extends CatchEventPropertyWriter {

    private final BoundaryEvent event;

    public BoundaryEventPropertyWriter(BoundaryEvent event, VariableScope variableScope) {
        super(event, variableScope);
        this.event = event;
    }

    /**
     * Records the cancel-activity flag both as a custom attribute on the flow
     * element and on the bpmn2 event itself.
     */
    @Override
    public void setCancelActivity(Boolean value) {
        CustomAttribute.boundarycaForEvent.of(flowElement).set(value);
        event.setCancelActivity(value);
    }

    /** Attaches this boundary event to the given parent activity. */
    public void setParentActivity(ActivityPropertyWriter parent) {
        event.setAttachedToRef(parent.getFlowElement());
    }

    @Override
    public void addEventDefinition(EventDefinition eventDefinition) {
        this.event.getEventDefinitions().add(eventDefinition);
    }

    /**
     * Computes the node's absolute bounds. Stunner stores a docked node's
     * bounds relative to its dock-source node, while the bpmn2 standard uses
     * absolute coordinates, so the dock source's absolute position must be
     * added in when present.
     */
    @Override
    public void setAbsoluteBounds(Node<? extends View, ?> node) {
        Bound ul = node.getContent().getBounds().getUpperLeft();
        //docker information is relative
        setDockerInfo(Point2D.create(ul.getX(), ul.getY()));

        Optional<Node<View, Edge>> dockSourceNode = getDockSourceNode(node);
        if (dockSourceNode.isPresent()) {
            //docked node bounds are relative to the dockSourceNode in Stunner, but not in bpmn2 standard so the node
            //absolute bounds must be calculated by using the dockSourceNode absolute coordinates.
            Bounds dockSourceNodeBounds = absoluteBounds(dockSourceNode.get());
            Bounds nodeBounds = node.getContent().getBounds();
            double x = dockSourceNodeBounds.getX() + nodeBounds.getUpperLeft().getX();
            double y = dockSourceNodeBounds.getY() + nodeBounds.getUpperLeft().getY();
            super.setBounds(Bounds.create(x, y, x + nodeBounds.getWidth(), y + nodeBounds.getHeight()));
        } else {
            //uncommon case
            super.setAbsoluteBounds(node);
        }
    }

    // Stores the relative docking coordinates as a custom attribute on the event.
    private void setDockerInfo(Point2D docker) {
        CustomAttribute.dockerInfo.of(event).set(docker);
    }
}
apache-2.0
efortuna/AndroidSDKClone
sdk/samples/android-20/sensors/BatchStepSensor/BatchStepSensorSample/src/main/java/com/example/android/batchstepsensor/cardstream/CardStreamState.java
1204
/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.android.batchstepsensor.cardstream;

import java.util.HashSet;

/**
 * Plain state snapshot for a {@link CardStreamFragment}: the visible cards,
 * the hidden cards, the set of tags that may be dismissed, and the tag of the
 * card currently shown.
 */
public class CardStreamState {
    protected Card[] visibleCards;
    protected Card[] hiddenCards;
    protected HashSet<String> dismissibleCards;
    protected String shownTag;

    protected CardStreamState(Card[] visible, Card[] hidden,
                              HashSet<String> dismissible, String shownTag) {
        // Straight field capture — no copying, callers own the arrays/set.
        this.shownTag = shownTag;
        this.visibleCards = visible;
        this.hiddenCards = hidden;
        this.dismissibleCards = dismissible;
    }
}
apache-2.0
apache/sis
core/sis-metadata/src/main/java/org/apache/sis/metadata/iso/identification/DefaultOperationChainMetadata.java
8711
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sis.metadata.iso.identification;

import java.util.List;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.opengis.util.InternationalString;
import org.apache.sis.metadata.iso.ISOMetadata;
import org.apache.sis.metadata.TitleProperty;
import org.apache.sis.util.iso.Types;
import org.apache.sis.xml.Namespaces;

// Branch-specific imports
import org.opengis.annotation.UML;
import static org.opengis.annotation.Obligation.OPTIONAL;
import static org.opengis.annotation.Obligation.MANDATORY;
import static org.opengis.annotation.Specification.ISO_19115;


/**
 * Operation chain information.
 * The following properties are mandatory in a well-formed metadata according ISO 19115:
 *
 * <div class="preformat">{@code SV_OperationChainMetadata}
 * {@code   ├─name………………………………………………………………………………} The name as used by the service for this chain.
 * {@code   └─operation…………………………………………………………………} Information about the operations applied by the chain.
 * {@code       ├─operationName……………………………………………} A unique identifier for this interface.
 * {@code       ├─distributedComputingPlatform……} Distributed computing platforms on which the operation has been implemented.
 * {@code       └─connectPoint………………………………………………} Handle for accessing the service interface.
 * {@code           └─linkage…………………………………………………} Location for on-line access using a URL address or similar addressing scheme.</div>
 *
 * <div class="warning"><b>Note on International Standard versions</b><br>
 * This class is derived from a new type defined in the ISO 19115 international standard published in 2014,
 * while GeoAPI 3.0 is based on the version published in 2003. Consequently this implementation class does
 * not yet implement a GeoAPI interface, but is expected to do so after the next GeoAPI releases.
 * When the interface will become available, all references to this implementation class in Apache SIS will
 * be replaced by references to the {@code OperationChainMetadata} interface.
 * </div>
 *
 * <h2>Limitations</h2>
 * <ul>
 *   <li>Instances of this class are not synchronized for multi-threading.
 *       Synchronization, if needed, is caller's responsibility.</li>
 *   <li>Serialized objects of this class are not guaranteed to be compatible with future Apache SIS releases.
 *       Serialization support is appropriate for short term storage or RMI between applications running the
 *       same version of Apache SIS. For long term storage, use {@link org.apache.sis.xml.XML} instead.</li>
 * </ul>
 *
 * @author  Rémi Maréchal (Geomatys)
 * @author  Martin Desruisseaux (Geomatys)
 * @version 1.0
 * @since   0.5
 * @module
 */
@TitleProperty(name = "name")
@XmlType(name = "SV_OperationChainMetadata_Type", namespace = Namespaces.SRV, propOrder = {
    "name",
    "description",
    "operations"
})
@XmlRootElement(name = "SV_OperationChainMetadata", namespace = Namespaces.SRV)
@UML(identifier="SV_OperationChainMetadata", specification=ISO_19115)
public class DefaultOperationChainMetadata extends ISOMetadata {
    /**
     * Serial number for compatibility with different versions.
     */
    private static final long serialVersionUID = 4132508877114835286L;

    /**
     * The name as used by the service for this chain.
     */
    private InternationalString name;

    /**
     * A narrative explanation of the services in the chain and resulting output.
     */
    private InternationalString description;

    /**
     * Information about the operations applied by the chain.
     */
    private List<DefaultOperationMetadata> operations;

    /**
     * Constructs an initially empty operation chain metadata.
     */
    public DefaultOperationChainMetadata() {
    }

    /**
     * Constructs a new operation chain metadata initialized to the specified name.
     *
     * @param name  the name as used by the service for this chain.
     */
    public DefaultOperationChainMetadata(final CharSequence name) {
        this.name = Types.toInternationalString(name);
    }

    /**
     * Constructs a new instance initialized with the values from the specified metadata object.
     * This is a <cite>shallow</cite> copy constructor, since the other metadata contained in the
     * given object are not recursively copied.
     *
     * @param object  the metadata to copy values from, or {@code null} if none.
     */
    public DefaultOperationChainMetadata(final DefaultOperationChainMetadata object) {
        super(object);
        if (object != null) {
            this.name        = object.getName();
            this.description = object.getDescription();
            this.operations  = copyList(object.getOperations(), DefaultOperationMetadata.class);
        }
    }

    /**
     * Returns the name as used by the service for this chain.
     *
     * @return name as used by the service for this chain.
     */
    @XmlElement(name = "name", namespace = Namespaces.SRV, required = true)
    @UML(identifier="name", obligation=MANDATORY, specification=ISO_19115)
    public InternationalString getName() {
        return name;
    }

    /**
     * Sets the name used by the service for this chain.
     *
     * @param newValue  the new name used by the service for this chain.
     */
    public void setName(final InternationalString newValue) {
        // Throws if this metadata instance is unmodifiable.
        checkWritePermission(name);
        name = newValue;
    }

    /**
     * Returns a narrative explanation of the services in the chain and resulting output.
     *
     * @return narrative explanation of the services in the chain and resulting output, or {@code null} if none.
     */
    @XmlElement(name = "description", namespace = Namespaces.SRV)
    @UML(identifier="description", obligation=OPTIONAL, specification=ISO_19115)
    public InternationalString getDescription() {
        return description;
    }

    /**
     * Sets the narrative explanation of the services in the chain and resulting output.
     *
     * @param newValue  the new narrative explanation of the services in the chain and resulting output.
     */
    public void setDescription(final InternationalString newValue) {
        checkWritePermission(description);
        description = newValue;
    }

    /**
     * Returns information about the operations applied by the chain.
     *
     * <div class="warning"><b>Upcoming API change — generalization</b><br>
     * The element type will be changed to the {@code OperationMetadata} interface
     * when GeoAPI will provide it (tentatively in GeoAPI 3.1).
     * </div>
     *
     * @return information about the operations applied by the chain.
     */
    @XmlElement(name = "operation", namespace = Namespaces.SRV, required = true)
    @UML(identifier="operation", obligation=MANDATORY, specification=ISO_19115)
    public List<DefaultOperationMetadata> getOperations() {
        // Lazily replaces a null list by an empty checked list (never returns null).
        return operations = nonNullList(operations, DefaultOperationMetadata.class);
    }

    /**
     * Sets the information about the operations applied by the chain.
     *
     * <div class="warning"><b>Upcoming API change — generalization</b><br>
     * The element type will be changed to the {@code OperationMetadata} interface
     * when GeoAPI will provide it (tentatively in GeoAPI 3.1).
     * </div>
     *
     * @param newValues  the new information about the operations applied by the chain.
     */
    public void setOperations(final List<? extends DefaultOperationMetadata> newValues) {
        operations = writeList(newValues, operations, DefaultOperationMetadata.class);
    }
}
apache-2.0
kavin256/Derby
java/engine/org/apache/derby/impl/jdbc/EmbedSQLException.java
3788
/*

   Derby - Class org.apache.derby.impl.jdbc.EmbedSQLException

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to you under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package org.apache.derby.impl.jdbc;

import org.apache.derby.iapi.error.DerbySQLException;
import java.sql.SQLException;
import org.apache.derby.iapi.error.StandardException;

/**
	This class is what gets send over the wire in client/server configuration.
	When running embedded, this has the detailed stack trace for exceptions.
	In case of client/server, server has all the stack trace information but
	client doesn't get the stack trace, just the sql exception. The reason for
	this implementation is the stack trace information is more relevant on the
	server side and it also decreases the size of client jar file tremendously.
*/
public class EmbedSQLException extends SQLException implements DerbySQLException {

	// Message-formatting arguments; transient so they are not serialized to clients.
	private transient Object[] arguments;
	// Derby message identifier used to derive the SQLState.
	private String messageId;

	/**
	 * Because SQLException does not have settable fields,
	 * the caller of the constructor must do message lookup,
	 * and pass the appropriate values here for message, messageId,
	 * and next exception.
	 *
	 * @param message       the already-localized message text
	 * @param messageId     Derby message identifier (also yields the SQLState)
	 * @param nextException chained SQLException, if any
	 * @param severity      Derby severity code, stored as the vendor code
	 * @param t             cause, or null to fall back to nextException
	 * @param args          message-formatting arguments
	 */
	EmbedSQLException(String message, String messageId,
		SQLException nextException, int severity, Throwable t, Object[] args) {
		super(message, StandardException.getSQLStateFromIdentifier(messageId), severity);
		this.messageId = messageId;
		arguments = args;
		if (nextException !=null)
			this.setNextException(nextException);

		// if no cause has been specified, let nextException be the cause (this
		// improves error reporting since the cause is included in the output
		// from printStackTrace())
		if (t == null) {
			t = nextException;
		}
		if (t != null) {
			initCause(t);
		}
	}

	/** Returns the Derby message identifier for this exception. */
	public String getMessageId() {
		return messageId;
	}

	/** Returns the message-formatting arguments, or null if none were given. */
	public Object[] getArguments() {
		return arguments;
	}

	/*
	** Methods of Object
	*/

	/**
		Override Throwables toString() to avoid
		the class name appearing in the message.
	*/
	public String toString() {
		// We use java.sql.SQLException rather than the default toString(),
		// which returns org.apache.derby.impl.jdbc.EmbedSQLException, so
		// that (a) we're not exposing an internal class name and (b) so
		// this is consistent with the network client, where SQLExceptions
		// are vanilla java.sql classes and not our own subclass
		return "java.sql.SQLException: " + getMessage();
	}

	/*
	** Some hack methods for 3.0.1. These will get cleaned up in main
	** with the exception re-work.
	*/
	// True when this instance merely wraps a StandardException (see below).
	private transient boolean simpleWrapper;

	/**
	 * Wraps a StandardException (or other Throwable) in an EmbedSQLException,
	 * marking the result as a simple wrapper.
	 */
	public static SQLException wrapStandardException(String message, String messageId, int code, Throwable se) {
		EmbedSQLException csqle = new EmbedSQLException(message, messageId,
			(SQLException) null, code, se,
			(se instanceof StandardException) ? ((StandardException)se).getArguments() : null);
		csqle.simpleWrapper = true;
		return csqle;
	}

	/**
	 * Returns true when this exception is a plain wrapper with no chained
	 * next exception.
	 */
	public boolean isSimpleWrapper() {
		if (getNextException() != null) {
			return false;
		}
		return simpleWrapper;
	}
}
apache-2.0
bvelivala/SampleInsurance
src/main/java/com/example/Application.java
634
package com.example;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.web.support.SpringBootServletInitializer;

/**
 * Spring Boot entry point. Extends {@link SpringBootServletInitializer} so the
 * application can also be deployed as a WAR to an external servlet container,
 * in addition to running standalone via {@link #main(String[])}.
 */
@SpringBootApplication
public class Application extends SpringBootServletInitializer{

    /**
     * Registers this class as the configuration source when the app is
     * launched by a servlet container instead of the embedded server.
     */
    @Override
    protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
        return application.sources(Application.class);
    }

    /** Standalone launcher using the embedded server. */
    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
apache-2.0
AbleOne/link-rest
agrest/src/test/java/io/agrest/DataResponseIT.java
3867
package io.agrest;

import static java.util.stream.Collectors.joining;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Collection;

import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.UriInfo;

import io.agrest.encoder.EncoderITBase;
import io.agrest.it.fixture.cayenne.E2;
import io.agrest.it.fixture.cayenne.E3;
import org.apache.cayenne.Cayenne;
import org.apache.cayenne.Persistent;
import org.junit.Test;

/**
 * Integration tests for {@link DataResponse#getIncludedObjects}, covering the
 * root entity with and without request parameters, related entities via an
 * "include" path, and a path that was never included.
 */
public class DataResponseIT extends EncoderITBase {

    // Renders objects as "EntityName:pk" joined with ';' for compact assertions.
    private String toIdsString(Collection<? extends Persistent> objects) {
        return objects.stream().map(o -> o.getObjectId().getEntityName() + ":" + Cayenne.intPKForObject(o))
                .collect(joining(";"));
    }

    // Root entity, no query parameters: all rows come back.
    @Test
    public void testGetIncludedObjects_Root_NoLimits() {
        DB.insert("e2", "id, name", "1, 'xxx'");
        DB.insert("e2", "id, name", "2, 'yyy'");
        DB.insert("e2", "id, name", "3, 'zzz'");

        DataResponse<E2> response = createAgService().select(E2.class).get();

        Collection<E2> objects = response.getIncludedObjects(E2.class, "");
        assertEquals("E2:1;E2:2;E2:3", toIdsString(objects));
    }

    // "mapBy" changes the response shape but not the set of included objects.
    @Test
    public void testGetIncludedObjects_Root_MapBy() {
        DB.insert("e2", "id, name", "1, 'xxx'");
        DB.insert("e2", "id, name", "2, 'yyy'");
        DB.insert("e2", "id, name", "3, 'zzz'");

        MultivaluedHashMap<String, String> params = new MultivaluedHashMap<>();
        params.putSingle("mapBy", "name");

        UriInfo mockUri = mock(UriInfo.class);
        when(mockUri.getQueryParameters()).thenReturn(params);

        DataResponse<E2> response = createAgService().select(E2.class).uri(mockUri).get();

        Collection<E2> objects = response.getIncludedObjects(E2.class, "");
        assertEquals("E2:1;E2:2;E2:3", toIdsString(objects));
    }

    // "start"/"limit" paging is reflected in the included objects window.
    @Test
    public void testGetIncludedObjects_Root_StartLimit() {
        DB.insert("e2", "id, name", "1, 'xxx'");
        DB.insert("e2", "id, name", "2, 'yyy'");
        DB.insert("e2", "id, name", "3, 'zzz'");
        DB.insert("e2", "id, name", "4, 'zzz'");

        MultivaluedHashMap<String, String> params = new MultivaluedHashMap<>();
        params.putSingle("sort", "id");
        params.putSingle("start", "1");
        params.putSingle("limit", "2");

        UriInfo mockUri = mock(UriInfo.class);
        when(mockUri.getQueryParameters()).thenReturn(params);

        DataResponse<E2> response = createAgService().select(E2.class).uri(mockUri).get();

        Collection<E2> objects = response.getIncludedObjects(E2.class, "");
        assertEquals("E2:2;E2:3", toIdsString(objects));
    }

    // Related entities are reachable by the include path ("e3s"), honoring its sort.
    @Test
    public void testGetIncludedObjects_Related() {
        DB.insert("e2", "id, name", "1, 'xxx'");
        DB.insert("e2", "id, name", "2, 'yyy'");
        DB.insert("e2", "id, name", "3, 'zzz'");
        DB.insert("e3", "id, e2_id, name", "7, 2, 'zzz'");
        DB.insert("e3", "id, e2_id, name", "8, 1, 'yyy'");
        DB.insert("e3", "id, e2_id, name", "9, 1, 'zzz'");

        MultivaluedHashMap<String, String> params = new MultivaluedHashMap<>();
        params.putSingle("include", "{\"path\":\"e3s\",\"sort\":\"id\"}");

        UriInfo mockUri = mock(UriInfo.class);
        when(mockUri.getQueryParameters()).thenReturn(params);

        DataResponse<E2> response = createAgService().select(E2.class).uri(mockUri).get();

        Collection<E3> objects = response.getIncludedObjects(E3.class, "e3s");
        assertEquals("E3:8;E3:9;E3:7", toIdsString(objects));
    }

    // A path that was never included yields an empty collection, not an error.
    @Test
    public void testGetIncludedObjects_MissingPath() {
        DB.insert("e2", "id, name", "1, 'xxx'");
        DB.insert("e2", "id, name", "2, 'yyy'");
        DB.insert("e2", "id, name", "3, 'zzz'");
        DB.insert("e3", "id, e2_id, name", "7, 2, 'zzz'");
        DB.insert("e3", "id, e2_id, name", "8, 1, 'yyy'");
        DB.insert("e3", "id, e2_id, name", "9, 1, 'zzz'");

        DataResponse<E2> response = createAgService().select(E2.class).get();

        Collection<E3> objects = response.getIncludedObjects(E3.class, "e3s");
        assertEquals("", toIdsString(objects));
    }
}
apache-2.0
anshuiisc/storm-Allbolts-wiring
storm-core/src/jvm/org/apache/storm/windowing/TupleWindowImpl.java
2679
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.storm.windowing; import org.apache.storm.tuple.Tuple; import java.util.List; /** * Holds the expired, new and current tuples in a window. */ public class TupleWindowImpl implements TupleWindow { private final List<Tuple> tuples; private final List<Tuple> newTuples; private final List<Tuple> expiredTuples; public TupleWindowImpl(List<Tuple> tuples, List<Tuple> newTuples, List<Tuple> expiredTuples) { this.tuples = tuples; this.newTuples = newTuples; this.expiredTuples = expiredTuples; } @Override public List<Tuple> get() { return tuples; } @Override public List<Tuple> getNew() { return newTuples; } @Override public List<Tuple> getExpired() { return expiredTuples; } @Override public String toString() { return "TupleWindowImpl{" + "tuples=" + tuples + ", newTuples=" + newTuples + ", expiredTuples=" + expiredTuples + '}'; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; TupleWindowImpl that = (TupleWindowImpl) o; if (tuples != null ? !tuples.equals(that.tuples) : that.tuples != null) return false; if (newTuples != null ? 
!newTuples.equals(that.newTuples) : that.newTuples != null) return false; return expiredTuples != null ? expiredTuples.equals(that.expiredTuples) : that.expiredTuples == null; } @Override public int hashCode() { int result = tuples != null ? tuples.hashCode() : 0; result = 31 * result + (newTuples != null ? newTuples.hashCode() : 0); result = 31 * result + (expiredTuples != null ? expiredTuples.hashCode() : 0); return result; } }
apache-2.0
liu-jerry/jvm
jvm/src/main/java/org/oyach/jvm/parse/attribute/annotation/AnnotationElementValue.java
291
package org.oyach.jvm.parse.attribute.annotation;

/**
 * An element value whose payload is a nested {@link Annotation}.
 * NOTE(review): presumably models the {@code element_value} union of the JVM
 * class-file format for the annotation-typed case — confirm against the parser
 * that constructs it.
 */
public class AnnotationElementValue extends ElementValue {
    // The nested annotation carried by this element value.
    private final Annotation annotation;

    /**
     * @param tag        the element_value tag byte (stored by the superclass)
     * @param annotation the nested annotation payload
     */
    protected AnnotationElementValue(char tag, Annotation annotation) {
        super(tag);

        this.annotation = annotation;
    }
}
apache-2.0
apache/commons-functor
core/src/test/java/org/apache/commons/functor/generator/loop/TestGenerateUntil.java
4063
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.functor.generator.loop;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.apache.commons.functor.Predicate;
import org.apache.commons.functor.generator.Generator;
import org.apache.commons.functor.generator.loop.GenerateUntil;
import org.apache.commons.functor.range.IntegerRange;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests the Generate Until class.
 * Covers constructor null-argument validation and the equals/hashCode contract.
 * @version $Revision: 1508677 $ $Date: 2013-07-30 19:48:02 -0300 (Tue, 30 Jul 2013) $
 */
public class TestGenerateUntil {

    @Before
    public void setUp() throws Exception {
        // Generator over 1..9, stopped once the predicate (obj > 5) holds.
        wrappedGenerator = IteratorToGeneratorAdapter.adapt(new IntegerRange(1, 10));
        generateUntil = new GenerateUntil<Integer>(wrappedGenerator, isMoreThanFive);
    }

    @After
    public void tearDown() {
        wrappedGenerator = null;
        isMoreThanFive = null;
        generateUntil = null;
    }

    // Tests
    // ------------------------------------------------------------------------

    @Test(expected=NullPointerException.class)
    public void testConstructorProhibitsNullWrappedPredicate() {
        new GenerateUntil<Integer>(generateUntil, null);
    }

    @Test(expected=NullPointerException.class)
    public void testConstructorProhibitsNullGenerator() {
        new GenerateUntil<Integer>(null, isMoreThanFive);
    }

    @Test(expected=NullPointerException.class)
    public void testConstructorProhibitsNullGeneratorOrNullWrappedPredicate() {
        new GenerateUntil<Integer>(null, null);
    }

    @Test
    public void testEquals() {
        // Equal when both the wrapped generator and the predicate are equal.
        Generator<Integer> anotherGenerate = new GenerateUntil<Integer>(
            IteratorToGeneratorAdapter.adapt(new IntegerRange(1, 10)), isMoreThanFive);
        assertEquals(generateUntil, generateUntil);
        assertEquals(generateUntil, anotherGenerate);
        assertTrue(!generateUntil.equals((GenerateUntil<Integer>)null));

        // A different (anonymous) predicate instance breaks equality.
        Generator<Integer> aGenerateWithADifferentPredicate = new GenerateUntil<Integer>(
            IteratorToGeneratorAdapter.adapt(new IntegerRange(1, 10)),
            new Predicate<Integer>() {
                public boolean test(Integer obj) {
                    return obj > FIVE;
                }
            });
        assertTrue(!generateUntil.equals(aGenerateWithADifferentPredicate));

        // A different wrapped range breaks equality.
        Generator<Integer> aGenerateWithADifferentWrapped = new GenerateUntil<Integer>(
            IteratorToGeneratorAdapter.adapt(new IntegerRange(1,2)), isMoreThanFive);
        assertTrue(!generateUntil.equals(aGenerateWithADifferentWrapped));
    }

    @Test
    public void testHashcode() {
        // hashCode must be stable and consistent with equals.
        assertEquals(generateUntil.hashCode(), generateUntil.hashCode());
        assertEquals(generateUntil.hashCode(),
            new GenerateUntil<Integer>(wrappedGenerator, isMoreThanFive).hashCode());
    }

    // Attributes
    // ------------------------------------------------------------------------
    private static final Integer FIVE = Integer.valueOf(5);

    private Generator<Integer> wrappedGenerator = null;
    private Predicate<Integer> isMoreThanFive = new Predicate<Integer>() {
        public boolean test( Integer obj ) {
            return obj > FIVE;
        }
    };
    private Generator<Integer> generateUntil = null;
}
apache-2.0
ua-eas/ksd-kc5.2.1-rice2.3.6-ua
rice-middleware/impl/src/main/java/org/kuali/rice/kns/datadictionary/exporter/MaintenanceDocumentEntryMapper.java
7206
/**
 * Copyright 2005-2014 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.rice.kns.datadictionary.exporter;

import java.util.Iterator;

import org.apache.commons.lang.StringUtils;
import org.kuali.rice.kew.api.doctype.DocumentType;
import org.kuali.rice.kns.datadictionary.MaintainableCollectionDefinition;
import org.kuali.rice.kns.datadictionary.MaintainableFieldDefinition;
import org.kuali.rice.kns.datadictionary.MaintainableItemDefinition;
import org.kuali.rice.kns.datadictionary.MaintainableSectionDefinition;
import org.kuali.rice.kns.datadictionary.MaintainableSubSectionHeaderDefinition;
import org.kuali.rice.kns.datadictionary.MaintenanceDocumentEntry;
import org.kuali.rice.kns.service.DocumentHelperService;
import org.kuali.rice.kns.service.KNSServiceLocator;
import org.kuali.rice.krad.datadictionary.exporter.ExportMap;
import org.kuali.rice.krad.service.KRADServiceLocatorWeb;

/**
 * Exports a {@link MaintenanceDocumentEntry} into the nested String/Map
 * {@link ExportMap} representation used by the data dictionary exporter.
 */
@Deprecated
public class MaintenanceDocumentEntryMapper extends DocumentEntryMapper {

    /**
     * Default constructor
     */
    public MaintenanceDocumentEntryMapper() {
    }

    /**
     * Builds the export representation for a maintenance document entry.
     *
     * @param entry the data dictionary entry to map
     * @return Map containing a String- and Map-based representation of the given entry
     */
    public ExportMap mapEntry(MaintenanceDocumentEntry entry) {
        // simple properties
        ExportMap entryMap = new ExportMap(entry.getJstlKey());

        // Class<?> instead of a raw Class reference; the rules class is optional
        Class<?> businessRulesClass = entry.getBusinessRulesClass();
        if (businessRulesClass != null) {
            entryMap.set("businessRulesClass", businessRulesClass.getName());
        }

        entryMap.set("documentTypeName", entry.getDocumentTypeName());

        DocumentType docType = getDocumentType(entry.getDocumentTypeName());
        entryMap.set("label", docType.getLabel());
        if (docType.getDescription() != null) {
            entryMap.set("description", docType.getDescription());
        }

        DocumentHelperService documentHelperService = KNSServiceLocator.getDocumentHelperService();
        entryMap.set("documentAuthorizerClass",
                documentHelperService.getDocumentAuthorizer(entry.getDocumentTypeName()).getClass().getName());
        entryMap.set("documentPresentationControllerClass",
                documentHelperService.getDocumentPresentationController(entry.getDocumentTypeName()).getClass().getName());

        entryMap.set("allowsNoteAttachments", Boolean.toString(entry.getAllowsNoteAttachments()));
        entryMap.set("allowsNoteFYI", Boolean.toString(entry.getAllowsNoteFYI()));
        if (entry.getAttachmentTypesValuesFinderClass() != null) {
            entryMap.set("attachmentTypesValuesFinderClass", entry.getAttachmentTypesValuesFinderClass().getName());
        }
        entryMap.set("displayTopicFieldInNotes", Boolean.toString(entry.getDisplayTopicFieldInNotes()));
        entryMap.set("usePessimisticLocking", Boolean.toString(entry.getUsePessimisticLocking()));
        entryMap.set("useWorkflowPessimisticLocking", Boolean.toString(entry.getUseWorkflowPessimisticLocking()));
        entryMap.set("sessionDocument", Boolean.toString(entry.isSessionDocument()));

        entryMap.set(new AttributesMapBuilder().buildAttributesMap(entry));
        entryMap.set(new CollectionsMapBuilder().buildCollectionsMap(entry));

        // simple properties
        entryMap.set("maintenanceDocument", "true");
        entryMap.set("dataObjectClass", entry.getBusinessObjectClass().getName());
        entryMap.set("maintainableClass", entry.getMaintainableClass().getName());

        // complex properties
        entryMap.set(buildMaintainableSectionsMap(entry));

        return entryMap;
    }

    /**
     * Maps every maintainable section of the entry, keyed by its zero-based index.
     */
    private ExportMap buildMaintainableSectionsMap(MaintenanceDocumentEntry entry) {
        ExportMap maintainableSectionsMap = new ExportMap("maintainableSections");

        int index = 0;
        // enhanced-for replaces the raw Iterator; the explicit cast is retained
        // because the collection's element type is not guaranteed generically
        for (Object o : entry.getMaintainableSections()) {
            MaintainableSectionDefinition section = (MaintainableSectionDefinition) o;
            maintainableSectionsMap.set(buildMaintainableSectionMap(section, index++));
        }

        return maintainableSectionsMap;
    }

    /**
     * Maps one section: its index, title, and contained items.
     */
    private ExportMap buildMaintainableSectionMap(MaintainableSectionDefinition section, int index) {
        ExportMap sectionMap = new ExportMap(Integer.toString(index));

        sectionMap.set("index", Integer.toString(index));
        sectionMap.set("title", section.getTitle());
        sectionMap.set(buildMaintainableItemsMap(section));

        return sectionMap;
    }

    /**
     * Maps all items contained in the given section.
     */
    private ExportMap buildMaintainableItemsMap(MaintainableSectionDefinition section) {
        ExportMap itemsMap = new ExportMap("maintainableItems");

        for (Object o : section.getMaintainableItems()) {
            MaintainableItemDefinition item = (MaintainableItemDefinition) o;
            itemsMap.set(buildMaintainableItemMap(item));
        }

        return itemsMap;
    }

    /**
     * Maps a single item; the exported keys depend on the concrete item subtype
     * (field, collection, or sub-section header).
     *
     * @throws IllegalStateException if the item is of an unknown subtype
     */
    private ExportMap buildMaintainableItemMap(MaintainableItemDefinition item) {
        ExportMap itemMap = new ExportMap(item.getName());

        if (item instanceof MaintainableFieldDefinition) {
            MaintainableFieldDefinition field = (MaintainableFieldDefinition) item;

            itemMap.set("field", "true");
            itemMap.set("name", field.getName());
            itemMap.set("required", Boolean.toString(field.isRequired()));
            if (StringUtils.isNotBlank(field.getAlternateDisplayAttributeName())) {
                itemMap.set("alternateDisplayAttributeName", field.getAlternateDisplayAttributeName());
            }
            if (StringUtils.isNotBlank(field.getAdditionalDisplayAttributeName())) {
                itemMap.set("additionalDisplayAttributeName", field.getAdditionalDisplayAttributeName());
            }
        }
        else if (item instanceof MaintainableCollectionDefinition) {
            MaintainableCollectionDefinition collection = (MaintainableCollectionDefinition) item;

            itemMap.set("collection", "true");
            itemMap.set("name", collection.getName());
            itemMap.set("dataObjectClass", collection.getBusinessObjectClass().getName());
        }
        else if (item instanceof MaintainableSubSectionHeaderDefinition) {
            MaintainableSubSectionHeaderDefinition subSectionHeader = (MaintainableSubSectionHeaderDefinition) item;

            itemMap.set("name", subSectionHeader.getName());
        }
        else {
            throw new IllegalStateException(
                    "unable to create itemMap for unknown MaintainableItem subclass '" + item.getClass().getName() + "'");
        }

        return itemMap;
    }
}
apache-2.0
googleapis/google-api-java-client-services
clients/google-api-services-iam/v1/1.27.0/com/google/api/services/iam/v1/model/Binding.java
6709
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */
package com.google.api.services.iam.v1.model;

import com.google.api.client.json.GenericJson;
import com.google.api.client.util.Key;

import java.util.List;

/**
 * Associates `members` with a `role`.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Identity and Access Management (IAM) API. For a
 * detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class Binding extends GenericJson {

  /**
   * The condition that is associated with this binding. NOTE: an unsatisfied condition will not
   * allow user access via the current binding; different bindings, including their conditions, are
   * examined independently. The value may be {@code null}.
   */
  @Key
  private Expr condition;

  /**
   * The identities requesting access for a Cloud Platform resource. `members` can contain:
   * `allUsers`, `allAuthenticatedUsers`, `user:{emailid}`, `serviceAccount:{emailid}`,
   * `group:{emailid}`, or `domain:{domain}` (a G Suite primary domain such as `google.com`).
   * The value may be {@code null}.
   */
  @Key
  private List<String> members;

  /**
   * The role that is assigned to `members`, e.g. `roles/viewer`, `roles/editor`, or `roles/owner`.
   * The value may be {@code null}.
   */
  @Key
  private String role;

  /** Returns the condition associated with this binding, or {@code null} for none. */
  public Expr getCondition() {
    return condition;
  }

  /** Sets the condition associated with this binding ({@code null} for none). */
  public Binding setCondition(Expr condition) {
    this.condition = condition;
    return this;
  }

  /** Returns the identities this binding applies to, or {@code null} for none. */
  public List<String> getMembers() {
    return members;
  }

  /** Sets the identities this binding applies to ({@code null} for none). */
  public Binding setMembers(List<String> members) {
    this.members = members;
    return this;
  }

  /** Returns the role assigned to `members`, or {@code null} for none. */
  public String getRole() {
    return role;
  }

  /** Sets the role assigned to `members` ({@code null} for none). */
  public Binding setRole(String role) {
    this.role = role;
    return this;
  }

  @Override
  public Binding set(String fieldName, Object value) {
    return (Binding) super.set(fieldName, value);
  }

  @Override
  public Binding clone() {
    return (Binding) super.clone();
  }
}
apache-2.0
googleapis/google-api-java-client-services
clients/google-api-services-ml/v1/1.27.0/com/google/api/services/ml/v1/model/GoogleIamV1Binding.java
6815
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */
package com.google.api.services.ml.v1.model;

import com.google.api.client.json.GenericJson;
import com.google.api.client.util.Key;

import java.util.List;

/**
 * Associates `members` with a `role`.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Cloud Machine Learning Engine. For a detailed
 * explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class GoogleIamV1Binding extends GenericJson {

  /**
   * The condition that is associated with this binding. NOTE: an unsatisfied condition will not
   * allow user access via the current binding; different bindings, including their conditions, are
   * examined independently. The value may be {@code null}.
   */
  @Key
  private GoogleTypeExpr condition;

  /**
   * The identities requesting access for a Cloud Platform resource. `members` can contain:
   * `allUsers`, `allAuthenticatedUsers`, `user:{emailid}`, `serviceAccount:{emailid}`,
   * `group:{emailid}`, or `domain:{domain}` (a G Suite primary domain such as `google.com`).
   * The value may be {@code null}.
   */
  @Key
  private List<String> members;

  /**
   * The role that is assigned to `members`, e.g. `roles/viewer`, `roles/editor`, or `roles/owner`.
   * The value may be {@code null}.
   */
  @Key
  private String role;

  /** Returns the condition associated with this binding, or {@code null} for none. */
  public GoogleTypeExpr getCondition() {
    return condition;
  }

  /** Sets the condition associated with this binding ({@code null} for none). */
  public GoogleIamV1Binding setCondition(GoogleTypeExpr condition) {
    this.condition = condition;
    return this;
  }

  /** Returns the identities this binding applies to, or {@code null} for none. */
  public List<String> getMembers() {
    return members;
  }

  /** Sets the identities this binding applies to ({@code null} for none). */
  public GoogleIamV1Binding setMembers(List<String> members) {
    this.members = members;
    return this;
  }

  /** Returns the role assigned to `members`, or {@code null} for none. */
  public String getRole() {
    return role;
  }

  /** Sets the role assigned to `members` ({@code null} for none). */
  public GoogleIamV1Binding setRole(String role) {
    this.role = role;
    return this;
  }

  @Override
  public GoogleIamV1Binding set(String fieldName, Object value) {
    return (GoogleIamV1Binding) super.set(fieldName, value);
  }

  @Override
  public GoogleIamV1Binding clone() {
    return (GoogleIamV1Binding) super.clone();
  }
}
apache-2.0
apache/incubator-asterixdb
asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableIntermediateSqlAvgAggregateFunction.java
2240
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.asterix.runtime.aggregates.serializable.std;

import java.io.DataOutput;

import org.apache.hyracks.algebricks.runtime.base.IEvaluatorContext;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.exceptions.SourceLocation;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;

/**
 * Intermediate stage of the serializable SQL AVG aggregate: every callback
 * delegates to the partial-result handling inherited from
 * {@code AbstractSerializableAvgAggregateFunction}.
 */
public class SerializableIntermediateSqlAvgAggregateFunction extends AbstractSerializableAvgAggregateFunction {

    public SerializableIntermediateSqlAvgAggregateFunction(IScalarEvaluatorFactory[] args, IEvaluatorContext context,
            SourceLocation sourceLoc) throws HyracksDataException {
        super(args, context, sourceLoc);
    }

    /** Merges an incoming partial result into the running aggregate state. */
    @Override
    public void step(IFrameTupleReference tuple, byte[] state, int start, int len) throws HyracksDataException {
        processPartialResults(tuple, state, start, len);
    }

    /** Emits the accumulated partial result as this stage's final output. */
    @Override
    public void finish(byte[] state, int start, int len, DataOutput result) throws HyracksDataException {
        finishPartialResults(state, start, len, result);
    }

    /** Same as {@link #finish}: an intermediate stage always emits a partial result. */
    @Override
    public void finishPartial(byte[] state, int start, int len, DataOutput result) throws HyracksDataException {
        finishPartialResults(state, start, len, result);
    }

    /** Intentionally a no-op: NULL inputs leave the aggregate state unchanged here. */
    @Override
    protected void processNull(byte[] state, int start) {
        // nothing to do
    }
}
apache-2.0
AperIati/olingo-odata4
lib/commons-core/src/main/java/org/apache/olingo/commons/core/edm/EdmReturnTypeImpl.java
2344
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.olingo.commons.core.edm; import org.apache.olingo.commons.api.edm.Edm; import org.apache.olingo.commons.api.edm.EdmException; import org.apache.olingo.commons.api.edm.EdmReturnType; import org.apache.olingo.commons.api.edm.EdmType; import org.apache.olingo.commons.api.edm.geo.SRID; import org.apache.olingo.commons.api.edm.provider.CsdlReturnType; public class EdmReturnTypeImpl implements EdmReturnType { private final CsdlReturnType returnType; private final EdmTypeInfo typeInfo; private EdmType typeImpl; public EdmReturnTypeImpl(final Edm edm, final CsdlReturnType returnType) { this.returnType = returnType; typeInfo = new EdmTypeInfo.Builder().setEdm(edm).setTypeExpression(returnType.getType()).build(); } @Override public boolean isCollection() { return returnType.isCollection(); } @Override public boolean isNullable() { return returnType.isNullable(); } @Override public Integer getMaxLength() { return returnType.getMaxLength(); } @Override public Integer getPrecision() { return returnType.getPrecision(); } @Override public Integer getScale() { return returnType.getScale(); } @Override public SRID getSrid() { return returnType.getSrid(); } @Override public EdmType getType() { 
if (typeImpl == null) { typeImpl = typeInfo.getType(); if (typeImpl == null) { throw new EdmException("Cannot find type with name: " + typeInfo.getFullQualifiedName()); } } return typeImpl; } }
apache-2.0
spinnaker/clouddriver
clouddriver-core/src/main/java/com/netflix/spinnaker/clouddriver/model/InstanceTypeProvider.java
755
/*
 * Copyright 2014 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.netflix.spinnaker.clouddriver.model;

import java.util.Set;

/**
 * Supplies the set of instance types known to a cloud provider.
 *
 * @param <T> the provider-specific {@link InstanceType} model
 */
public interface InstanceTypeProvider<T extends InstanceType> {

  /**
   * @return all known instance types
   */
  Set<T> getAll();
}
apache-2.0
ptupitsyn/ignite
modules/core/src/main/java/org/apache/ignite/internal/processors/cache/persistence/tree/util/PageHandler.java
17219
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence.tree.util; import java.nio.ByteBuffer; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.pagemem.PageMemory; import org.apache.ignite.internal.pagemem.PageSupport; import org.apache.ignite.internal.pagemem.wal.IgniteWriteAheadLogManager; import org.apache.ignite.internal.pagemem.wal.record.delta.InitNewPageRecord; import org.apache.ignite.internal.processors.cache.persistence.tree.io.PageIO; import org.apache.ignite.internal.stat.IoStatisticsHolder; import org.apache.ignite.internal.util.GridUnsafe; import static java.lang.Boolean.FALSE; import static java.lang.Boolean.TRUE; /** * Page handler. */ public abstract class PageHandler<X, R> { /** */ private static final PageHandler<Void, Boolean> NO_OP = new PageHandler<Void, Boolean>() { @Override public Boolean run(int cacheId, long pageId, long page, long pageAddr, PageIO io, Boolean walPlc, Void arg, int intArg, IoStatisticsHolder statHolder) throws IgniteCheckedException { return Boolean.TRUE; } }; /** * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page absolute pointer. * @param pageAddr Page address. * @param io IO. 
* @param walPlc Full page WAL record policy. * @param arg Argument. * @param intArg Argument of type {@code int}. * @param statHolder Statistics holder to track IO operations. * @return Result. * @throws IgniteCheckedException If failed. */ public abstract R run( int cacheId, long pageId, long page, long pageAddr, PageIO io, Boolean walPlc, X arg, int intArg, IoStatisticsHolder statHolder ) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param pageAddr Page address. * @param arg Argument. * @param intArg Argument of type {@code int}. * @return {@code true} If release. */ public boolean releaseAfterWrite( int cacheId, long pageId, long page, long pageAddr, X arg, int intArg) { return true; } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param lsnr Lock listener. * @param h Handler. * @param arg Argument. * @param intArg Argument of type {@code int}. * @param lockFailed Result in case of lock failure due to page recycling. * @param statHolder Statistics holder to track IO operations. * @return Handler result. * @throws IgniteCheckedException If failed. */ public static <X, R> R readPage( PageMemory pageMem, int cacheId, long pageId, PageLockListener lsnr, PageHandler<X, R> h, X arg, int intArg, R lockFailed, IoStatisticsHolder statHolder ) throws IgniteCheckedException { long page = pageMem.acquirePage(cacheId, pageId, statHolder); try { long pageAddr = readLock(pageMem, cacheId, pageId, page, lsnr); if (pageAddr == 0L) return lockFailed; try { PageIO io = PageIO.getPageIO(pageAddr); return h.run(cacheId, pageId, page, pageAddr, io, null, arg, intArg, statHolder); } finally { readUnlock(pageMem, cacheId, pageId, page, pageAddr, lsnr); } } finally { pageMem.releasePage(cacheId, pageId, page); } } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param lsnr Lock listener. * @param h Handler. 
* @param arg Argument. * @param intArg Argument of type {@code int}. * @param lockFailed Result in case of lock failure due to page recycling. * @param statHolder Statistics holder to track IO operations. * @return Handler result. * @throws IgniteCheckedException If failed. */ public static <X, R> R readPage( PageMemory pageMem, int cacheId, long pageId, long page, PageLockListener lsnr, PageHandler<X, R> h, X arg, int intArg, R lockFailed, IoStatisticsHolder statHolder ) throws IgniteCheckedException { long pageAddr = 0L; try { if ((pageAddr = readLock(pageMem, cacheId, pageId, page, lsnr)) == 0L) return lockFailed; PageIO io = PageIO.getPageIO(pageAddr); return h.run(cacheId, pageId, page, pageAddr, io, null, arg, intArg, statHolder); } finally { if (pageAddr != 0L) readUnlock(pageMem, cacheId, pageId, page, pageAddr, lsnr); } } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param lsnr Lock listener. * @return Page address. */ public static long readLock( PageMemory pageMem, int cacheId, long pageId, long page, PageLockListener lsnr) { lsnr.onBeforeReadLock(cacheId, pageId, page); long pageAddr = pageMem.readLock(cacheId, pageId, page); lsnr.onReadLock(cacheId, pageId, page, pageAddr); return pageAddr; } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param pageAddr Page address (for-write pointer) * @param lsnr Lock listener. */ public static void readUnlock( PageMemory pageMem, int cacheId, long pageId, long page, long pageAddr, PageLockListener lsnr) { lsnr.onReadUnlock(cacheId, pageId, page, pageAddr); pageMem.readUnlock(cacheId, pageId, page); } /** * @param pageMem Page memory. * @param grpId Group ID. * @param pageId Page ID. * @param init IO for new page initialization. * @param wal Write ahead log. * @param lsnr Lock listener. * @param statHolder Statistics holder to track IO operations. 
* @throws IgniteCheckedException If failed. */ public static void initPage( PageMemory pageMem, int grpId, long pageId, PageIO init, IgniteWriteAheadLogManager wal, PageLockListener lsnr, IoStatisticsHolder statHolder ) throws IgniteCheckedException { Boolean res = writePage(pageMem, grpId, pageId, lsnr, PageHandler.NO_OP, init, wal, null, null, 0, FALSE, statHolder); assert res != FALSE; } /** * @param pageMem Page memory. * @param grpId Group ID. * @param pageId Page ID. * @param lsnr Lock listener. * @param h Handler. * @param init IO for new page initialization or {@code null} if it is an existing page. * @param wal Write ahead log. * @param walPlc Full page WAL record policy. * @param arg Argument. * @param intArg Argument of type {@code int}. * @param lockFailed Result in case of lock failure due to page recycling. * @param statHolder Statistics holder to track IO operations. * @return Handler result. * @throws IgniteCheckedException If failed. */ public static <X, R> R writePage( PageMemory pageMem, int grpId, final long pageId, PageLockListener lsnr, PageHandler<X, R> h, PageIO init, IgniteWriteAheadLogManager wal, Boolean walPlc, X arg, int intArg, R lockFailed, IoStatisticsHolder statHolder ) throws IgniteCheckedException { boolean releaseAfterWrite = true; long page = pageMem.acquirePage(grpId, pageId, statHolder); try { long pageAddr = writeLock(pageMem, grpId, pageId, page, lsnr, false); if (pageAddr == 0L) return lockFailed; boolean ok = false; try { if (init != null) { // It is a new page and we have to initialize it. 
doInitPage(pageMem, grpId, pageId, page, pageAddr, init, wal); walPlc = FALSE; } else init = PageIO.getPageIO(pageAddr); R res = h.run(grpId, pageId, page, pageAddr, init, walPlc, arg, intArg, statHolder); ok = true; return res; } finally { assert PageIO.getCrc(pageAddr) == 0; //TODO GG-11480 if (releaseAfterWrite = h.releaseAfterWrite(grpId, pageId, page, pageAddr, arg, intArg)) writeUnlock(pageMem, grpId, pageId, page, pageAddr, lsnr, walPlc, ok); } } finally { if (releaseAfterWrite) pageMem.releasePage(grpId, pageId, page); } } /** * @param pageMem Page memory. * @param grpId Group ID. * @param pageId Page ID. * @param page Page pointer. * @param lsnr Lock listener. * @param h Handler. * @param init IO for new page initialization or {@code null} if it is an existing page. * @param wal Write ahead log. * @param walPlc Full page WAL record policy. * @param arg Argument. * @param intArg Argument of type {@code int}. * @param lockFailed Result in case of lock failure due to page recycling. * @param statHolder Statistics holder to track IO operations. * @return Handler result. * @throws IgniteCheckedException If failed. */ public static <X, R> R writePage( PageMemory pageMem, int grpId, long pageId, long page, PageLockListener lsnr, PageHandler<X, R> h, PageIO init, IgniteWriteAheadLogManager wal, Boolean walPlc, X arg, int intArg, R lockFailed, IoStatisticsHolder statHolder ) throws IgniteCheckedException { long pageAddr = writeLock(pageMem, grpId, pageId, page, lsnr, false); if (pageAddr == 0L) return lockFailed; boolean ok = false; try { if (init != null) { // It is a new page and we have to initialize it. 
doInitPage(pageMem, grpId, pageId, page, pageAddr, init, wal); walPlc = FALSE; } else init = PageIO.getPageIO(pageAddr); R res = h.run(grpId, pageId, page, pageAddr, init, walPlc, arg, intArg, statHolder); ok = true; return res; } finally { assert PageIO.getCrc(pageAddr) == 0; //TODO GG-11480 if (h.releaseAfterWrite(grpId, pageId, page, pageAddr, arg, intArg)) writeUnlock(pageMem, grpId, pageId, page, pageAddr, lsnr, walPlc, ok); } } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param pageAddr Page address. * @param lsnr Lock listener. * @param walPlc Full page WAL record policy. * @param dirty Page is dirty. */ public static void writeUnlock( PageMemory pageMem, int cacheId, long pageId, long page, long pageAddr, PageLockListener lsnr, Boolean walPlc, boolean dirty) { lsnr.onWriteUnlock(cacheId, pageId, page, pageAddr); pageMem.writeUnlock(cacheId, pageId, page, walPlc, dirty); } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param lsnr Lock listener. * @param tryLock Only try to lock without waiting. * @return Page address or {@code 0} if failed to lock due to recycling. */ public static long writeLock( PageMemory pageMem, int cacheId, long pageId, long page, PageLockListener lsnr, boolean tryLock) { lsnr.onBeforeWriteLock(cacheId, pageId, page); long pageAddr = tryLock ? pageMem.tryWriteLock(cacheId, pageId, page) : pageMem.writeLock(cacheId, pageId, page); lsnr.onWriteLock(cacheId, pageId, page, pageAddr); return pageAddr; } /** * @param pageMem Page memory. * @param grpId Group ID. * @param pageId Page ID. * @param page Page pointer. * @param pageAddr Page address. * @param init Initial IO. * @param wal Write ahead log. * @throws IgniteCheckedException If failed. 
*/ private static void doInitPage( PageMemory pageMem, int grpId, long pageId, long page, long pageAddr, PageIO init, IgniteWriteAheadLogManager wal) throws IgniteCheckedException { assert PageIO.getCrc(pageAddr) == 0; //TODO GG-11480 init.initNewPage(pageAddr, pageId, pageMem.realPageSize(grpId)); // Here we should never write full page, because it is known to be new. if (isWalDeltaRecordNeeded(pageMem, grpId, pageId, page, wal, FALSE)) wal.log(new InitNewPageRecord(grpId, pageId, init.getType(), init.getVersion(), pageId)); } /** * @param pageMem Page memory. * @param cacheId Cache ID. * @param pageId Page ID. * @param page Page pointer. * @param wal Write ahead log. * @param walPlc Full page WAL record policy. * @return {@code true} If we need to make a delta WAL record for the change in this page. */ public static boolean isWalDeltaRecordNeeded( PageSupport pageMem, int cacheId, long pageId, long page, IgniteWriteAheadLogManager wal, Boolean walPlc) { // If the page is clean, then it is either newly allocated or just after checkpoint. // In both cases we have to write full page contents to WAL. return wal != null && !wal.isAlwaysWriteFullPages() && walPlc != TRUE && !wal.disabled(cacheId) && (walPlc == FALSE || pageMem.isDirty(cacheId, pageId, page)); } /** * @param src Source. * @param srcOff Source offset in bytes. * @param dst Destination. * @param dstOff Destination offset in bytes. * @param cnt Bytes count to copy. */ public static void copyMemory(ByteBuffer src, long srcOff, ByteBuffer dst, long dstOff, long cnt) { byte[] srcArr = src.hasArray() ? src.array() : null; byte[] dstArr = dst.hasArray() ? dst.array() : null; long srcArrOff = src.hasArray() ? src.arrayOffset() + GridUnsafe.BYTE_ARR_OFF : 0; long dstArrOff = dst.hasArray() ? dst.arrayOffset() + GridUnsafe.BYTE_ARR_OFF : 0; long srcPtr = src.isDirect() ? GridUnsafe.bufferAddress(src) : 0; long dstPtr = dst.isDirect() ? 
GridUnsafe.bufferAddress(dst) : 0; GridUnsafe.copyMemory(srcArr, srcPtr + srcArrOff + srcOff, dstArr, dstPtr + dstArrOff + dstOff, cnt); } /** * Will zero memory in buf * @param buf Buffer. * @param off Offset. * @param len Length. */ public static void zeroMemory(ByteBuffer buf, int off, int len) { if (buf.isDirect()) GridUnsafe.setMemory(GridUnsafe.bufferAddress(buf) + off, len, (byte)0); else { for (int i = off; i < off + len; i++) buf.put(i, (byte)0); //TODO Optimize! } } /** * @param srcAddr Source. * @param srcOff Source offset in bytes. * @param dstAddr Destination. * @param dstOff Destination offset in bytes. * @param cnt Bytes count to copy. */ public static void copyMemory(long srcAddr, long srcOff, long dstAddr, long dstOff, long cnt) { GridUnsafe.copyMemory(null, srcAddr + srcOff, null, dstAddr + dstOff, cnt); } /** * @param addr Address. * @param off Offset. * @param len Length. */ public static void zeroMemory(long addr, int off, int len) { GridUnsafe.setMemory(addr + off, len, (byte)0); } }
apache-2.0
alibaba/fastjson
src/test/java/com/alibaba/json/bvt/bug/Bug_for_dubbo.java
1201
package com.alibaba.json.bvt.bug; import com.alibaba.fastjson.parser.ParserConfig; import org.junit.Assert; import junit.framework.TestCase; import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.serializer.SerializerFeature; import com.alibaba.json.test.dubbo.HelloServiceImpl; import com.alibaba.json.test.dubbo.Tiger; import com.alibaba.json.test.dubbo.Tigers; public class Bug_for_dubbo extends TestCase { protected void setUp() throws Exception { ParserConfig.global.addAccept("com.alibaba.json.test.dubbo.Tigers"); } public void test_0 () throws Exception { HelloServiceImpl helloService = new HelloServiceImpl(); Tiger tiger = new Tiger(); tiger.setTigerName("东北虎"); tiger.setTigerSex(true); //Tiger tigers = helloService.eatTiger(tiger).getTiger(); Tigers tigers = helloService.eatTiger(tiger); Assert.assertNotNull(tigers.getTiger()); String text = JSON.toJSONString(tigers, SerializerFeature.WriteClassName); System.out.println(text); Tigers tigers1 = (Tigers) JSON.parse(text); Assert.assertNotNull(tigers1.getTiger()); } }
apache-2.0
VikingDen/selendroid
selendroid-server/src/io/selendroid/server/action/touch/PointerDown.java
797
package io.selendroid.server.action.touch; import io.selendroid.android.internal.Point; import io.selendroid.server.action.Action; import io.selendroid.server.action.ActionContext; import io.selendroid.server.model.SelendroidDriver; import io.selendroid.server.model.TouchScreen; import org.json.JSONException; import org.json.JSONObject; public class PointerDown extends Action { public PointerDown(SelendroidDriver driver) { super(driver); } @Override public void perform(JSONObject properties, ActionContext context) throws JSONException { Point actionPosition = getActionPosition(properties); int x = actionPosition.getX(); int y = actionPosition.getY(); TouchScreen touchScreen = driver.getTouch(); touchScreen.down(x, y); context.press(x, y); } }
apache-2.0
mhgrove/Empire
core/main/src/com/clarkparsia/empire/config/EmpireConfiguration.java
3201
/* * Copyright (c) 2009-2011 Clark & Parsia, LLC. <http://www.clarkparsia.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.clarkparsia.empire.config; import com.clarkparsia.empire.util.EmpireAnnotationProvider; import com.clarkparsia.empire.util.PropertiesAnnotationProvider; import com.clarkparsia.empire.EmpireOptions; import com.complexible.common.util.PrefixMapping; import com.google.common.base.Splitter; import java.util.Map; import java.util.HashMap; import java.lang.reflect.Field; /** * <p>A simple container class for EmpireConfiguration information.</p> * * @author Michael Grove * @since 0.6.2 * @version 0.7 */ public final class EmpireConfiguration { private Class<? 
extends EmpireAnnotationProvider> mAnnotationProvider = PropertiesAnnotationProvider.class; private Map<String, String> mGeneralConfiguration = new HashMap<String, String>(); private Map<String, Map<String, String>> mUnitConfiguration = new HashMap<String, Map<String, String>>(); public EmpireConfiguration() { } public EmpireConfiguration(final Map<String, String> theGeneralConfiguration, final Map<String, Map<String, String>> theUnitConfiguration) { mGeneralConfiguration = theGeneralConfiguration; mUnitConfiguration = theUnitConfiguration; for (String aKey : mGeneralConfiguration.keySet()) { try { Field aField = EmpireOptions.class.getField(aKey); aField.setBoolean(null, Boolean.parseBoolean(mGeneralConfiguration.get(aKey))); } catch (Exception e) { // no-op, field doesn't exist, or the value is badly formatted. oh well } } // auto add any namespace declarations in the configuration file. if (mGeneralConfiguration.containsKey("ns_list")) { String aList = mGeneralConfiguration.get("ns_list"); for (String aKey : Splitter.on(',').omitEmptyStrings().trimResults().split(aList)) { if (mGeneralConfiguration.containsKey(aKey)) { PrefixMapping.GLOBAL.addMapping(aKey, mGeneralConfiguration.get(aKey)); } } } } public Class<? extends EmpireAnnotationProvider> getAnnotationProvider() { return mAnnotationProvider; } public void setAnnotationProvider(final Class<? extends EmpireAnnotationProvider> theAnnotationProvider) { mAnnotationProvider = theAnnotationProvider; } public Map<String, String> getUnitConfig(final String theUnitName) { return mUnitConfiguration.get(theUnitName); } public Map<String, String> getGlobalConfig() { return mGeneralConfiguration; } public String get(String theKey) { return mGeneralConfiguration.get(theKey); } public boolean hasUnit(final String theUnitName) { return mUnitConfiguration.containsKey(theUnitName); } }
apache-2.0
justinedelson/spring-security
samples/contacts/src/main/java/sample/contact/ContactDaoSpring.java
9308
/* Copyright 2004, 2005, 2006 Acegi Technology Pty Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sample.contact; import org.springframework.jdbc.core.SqlParameter; import org.springframework.jdbc.core.support.JdbcDaoSupport; import org.springframework.jdbc.object.MappingSqlQuery; import org.springframework.jdbc.object.SqlUpdate; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; import java.util.List; import javax.sql.DataSource; /** * Base implementation of {@link ContactDao} that uses Spring JDBC services. 
* * @author Ben Alex */ public class ContactDaoSpring extends JdbcDaoSupport implements ContactDao { //~ Instance fields ================================================================================================ private ContactDelete contactDelete; private ContactInsert contactInsert; private ContactUpdate contactUpdate; private ContactsAllQuery contactsAllQuery; private ContactsByIdQuery contactsByIdQuery; private PrincipalsAllQuery principalsAllQuery; private RolesAllQuery rolesAllQuery; //~ Methods ======================================================================================================== public void create(Contact contact) { contactInsert.insert(contact); } public void delete(Long contactId) { contactDelete.delete(contactId); } public List<Contact> findAll() { return contactsAllQuery.execute(); } public List<String> findAllPrincipals() { return principalsAllQuery.execute(); } public List<String> findAllRoles() { return rolesAllQuery.execute(); } public Contact getById(Long id) { List<Contact> list = contactsByIdQuery.execute(id.longValue()); if (list.size() == 0) { return null; } else { return (Contact) list.get(0); } } protected void initDao() throws Exception { contactInsert = new ContactInsert(getDataSource()); contactUpdate = new ContactUpdate(getDataSource()); contactDelete = new ContactDelete(getDataSource()); contactsAllQuery = new ContactsAllQuery(getDataSource()); principalsAllQuery = new PrincipalsAllQuery(getDataSource()); rolesAllQuery = new RolesAllQuery(getDataSource()); contactsByIdQuery = new ContactsByIdQuery(getDataSource()); } public void update(Contact contact) { contactUpdate.update(contact); } //~ Inner Classes ================================================================================================== protected class AclObjectIdentityByObjectIdentityQuery extends MappingSqlQuery<Long> { protected AclObjectIdentityByObjectIdentityQuery(DataSource ds) { super(ds, "SELECT id FROM acl_object_identity WHERE 
object_identity = ?"); declareParameter(new SqlParameter(Types.VARCHAR)); compile(); } protected Long mapRow(ResultSet rs, int rownum) throws SQLException { return new Long(rs.getLong("id")); } } protected class AclObjectIdentityInsert extends SqlUpdate { protected AclObjectIdentityInsert(DataSource ds) { super(ds, "INSERT INTO acl_object_identity VALUES (?, ?, ?, ?)"); declareParameter(new SqlParameter(Types.BIGINT)); declareParameter(new SqlParameter(Types.VARCHAR)); declareParameter(new SqlParameter(Types.INTEGER)); declareParameter(new SqlParameter(Types.VARCHAR)); compile(); } protected int insert(String objectIdentity, Long parentAclObjectIdentity, String aclClass) { Object[] objs = new Object[] {null, objectIdentity, parentAclObjectIdentity, aclClass}; super.update(objs); return getJdbcTemplate().queryForInt("call identity()"); } } protected class ContactDelete extends SqlUpdate { protected ContactDelete(DataSource ds) { super(ds, "DELETE FROM contacts WHERE id = ?"); declareParameter(new SqlParameter(Types.BIGINT)); compile(); } protected void delete(Long contactId) { super.update(contactId.longValue()); } } protected class ContactInsert extends SqlUpdate { protected ContactInsert(DataSource ds) { super(ds, "INSERT INTO contacts VALUES (?, ?, ?)"); declareParameter(new SqlParameter(Types.BIGINT)); declareParameter(new SqlParameter(Types.VARCHAR)); declareParameter(new SqlParameter(Types.VARCHAR)); compile(); } protected void insert(Contact contact) { Object[] objs = new Object[] {contact.getId(), contact.getName(), contact.getEmail()}; super.update(objs); } } protected class ContactUpdate extends SqlUpdate { protected ContactUpdate(DataSource ds) { super(ds, "UPDATE contacts SET contact_name = ?, address = ? 
WHERE id = ?"); declareParameter(new SqlParameter(Types.VARCHAR)); declareParameter(new SqlParameter(Types.VARCHAR)); declareParameter(new SqlParameter(Types.BIGINT)); compile(); } protected void update(Contact contact) { Object[] objs = new Object[] {contact.getName(), contact.getEmail(), contact.getId()}; super.update(objs); } } protected class ContactsAllQuery extends MappingSqlQuery<Contact> { protected ContactsAllQuery(DataSource ds) { super(ds, "SELECT id, contact_name, email FROM contacts ORDER BY id"); compile(); } protected Contact mapRow(ResultSet rs, int rownum) throws SQLException { Contact contact = new Contact(); contact.setId(new Long(rs.getLong("id"))); contact.setName(rs.getString("contact_name")); contact.setEmail(rs.getString("email")); return contact; } } protected class ContactsByIdQuery extends MappingSqlQuery<Contact> { protected ContactsByIdQuery(DataSource ds) { super(ds, "SELECT id, contact_name, email FROM contacts WHERE id = ? ORDER BY id"); declareParameter(new SqlParameter(Types.BIGINT)); compile(); } protected Contact mapRow(ResultSet rs, int rownum) throws SQLException { Contact contact = new Contact(); contact.setId(new Long(rs.getLong("id"))); contact.setName(rs.getString("contact_name")); contact.setEmail(rs.getString("email")); return contact; } } protected class PermissionDelete extends SqlUpdate { protected PermissionDelete(DataSource ds) { super(ds, "DELETE FROM acl_permission WHERE ACL_OBJECT_IDENTITY = ? 
AND RECIPIENT = ?"); declareParameter(new SqlParameter(Types.BIGINT)); declareParameter(new SqlParameter(Types.VARCHAR)); compile(); } protected void delete(Long aclObjectIdentity, String recipient) { super.update(new Object[] {aclObjectIdentity, recipient}); } } protected class PermissionInsert extends SqlUpdate { protected PermissionInsert(DataSource ds) { super(ds, "INSERT INTO acl_permission VALUES (?, ?, ?, ?);"); declareParameter(new SqlParameter(Types.BIGINT)); declareParameter(new SqlParameter(Types.BIGINT)); declareParameter(new SqlParameter(Types.VARCHAR)); declareParameter(new SqlParameter(Types.INTEGER)); compile(); } protected int insert(Long aclObjectIdentity, String recipient, Integer mask) { Object[] objs = new Object[] {null, aclObjectIdentity, recipient, mask}; super.update(objs); return getJdbcTemplate().queryForInt("call identity()"); } } protected class PrincipalsAllQuery extends MappingSqlQuery<String> { protected PrincipalsAllQuery(DataSource ds) { super(ds, "SELECT username FROM users ORDER BY username"); compile(); } protected String mapRow(ResultSet rs, int rownum) throws SQLException { return rs.getString("username"); } } protected class RolesAllQuery extends MappingSqlQuery<String> { protected RolesAllQuery(DataSource ds) { super(ds, "SELECT DISTINCT authority FROM authorities ORDER BY authority"); compile(); } protected String mapRow(ResultSet rs, int rownum) throws SQLException { return rs.getString("authority"); } } }
apache-2.0
orange-cloudfoundry/cf-java-client
cloudfoundry-client/src/main/java/org/cloudfoundry/client/v2/routes/_RouteExistsRequest.java
1244
/*
 * Copyright 2013-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.cloudfoundry.client.v2.routes;

import com.fasterxml.jackson.annotation.JsonIgnore;
import org.cloudfoundry.Nullable;
import org.cloudfoundry.QueryParameter;
import org.immutables.value.Value;

/**
 * The request payload for the Check a Route exists operation.
 *
 * <p>This is an Immutables abstract type; the generated {@code RouteExistsRequest}
 * class provides the builder and accessors. The domain id and host are excluded
 * from JSON serialization ({@code @JsonIgnore}) because they are carried in the
 * request URI rather than the body; the path is sent as a query parameter.
 */
@Value.Immutable
abstract class _RouteExistsRequest {

    /**
     * The id of the domain to check the route against (required; part of the request URI)
     */
    @JsonIgnore
    abstract String getDomainId();

    /**
     * The host of the route (optional; part of the request URI)
     */
    @Nullable
    @JsonIgnore
    abstract String getHost();

    /**
     * The path of the route (optional; sent as the {@code path} query parameter)
     */
    @Nullable
    @QueryParameter("path")
    abstract String getPath();

}
apache-2.0
porcelli-forks/kie-wb-common
kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-marshalling/src/main/java/org/kie/workbench/common/stunner/bpmn/client/marshall/converters/fromstunner/properties/FlatVariableScope.java
2631
/*
 * Copyright 2019 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.properties;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * A flat variable scope, where no nesting information is used.
 * <p>
 * In a flat scope there is no nesting. E.g.:
 * <p>
 * Process P defines variables x,y
 * <ul>
 * <li> SubProcess P1 nested in P defines P1_x, P2_y </li>
 * <li> SubProcess P2 nested in P defines P2_x </li>
 * </ul>
 * <p>
 * The FlatScope contains:
 *
 * <ul>
 * <li>x</li>
 * <li>y</li>
 * <li>P1_x</li>
 * <li>P1_y</li>
 * <li>P2_x</li>
 * </ul>
 * <p>
 * Also, P1 may refer to x, y, P1_x, P1_y, but also to P2_x, P2_y
 * <p>
 * In a flat scope, names can easily clash,
 * but it's simple to implement (it's a Map).
 * <p>
 * In future versions we might want to implement a more refined
 * Scope notion with nesting; in this case,
 * P1 may refer to x, y P1_x, P1_y, but NOT to P2_x, P2_y, because
 * P2 does not nest in P1
 */
public class FlatVariableScope implements VariableScope {

    // All declared variables keyed by identifier.  Because the scope is flat,
    // re-declaring an identifier (even from a different scopeId) overwrites the
    // previous entry.  Reference is final: it is never reassigned.
    private final Map<String, Variable> variables = new HashMap<>();

    /**
     * Declares a variable, overwriting any previous declaration with the same identifier.
     *
     * @param scopeId id of the (logical) scope the variable belongs to
     * @param identifier variable name, used as the lookup key
     * @param type variable type
     * @return the newly created {@link Variable}
     */
    public Variable declare(String scopeId, String identifier, String type) {
        Variable variable = new Variable(scopeId, identifier, type);
        variables.put(identifier, variable);
        return variable;
    }

    /**
     * Declares a tagged variable, overwriting any previous declaration with the same identifier.
     *
     * @param scopeId id of the (logical) scope the variable belongs to
     * @param identifier variable name, used as the lookup key
     * @param type variable type
     * @param tags variable tags
     * @return the newly created {@link Variable}
     */
    public Variable declare(String scopeId, String identifier, String type, String tags) {
        Variable variable = new Variable(scopeId, identifier, type, tags);
        variables.put(identifier, variable);
        return variable;
    }

    /**
     * Looks a variable up by identifier.
     *
     * @param identifier variable name
     * @return the variable, or {@link Optional#empty()} if it was never declared
     */
    public Optional<Variable> lookup(String identifier) {
        return Optional.ofNullable(variables.get(identifier));
    }

    /**
     * Returns all variables declared under the given scope id.
     *
     * @param scopeId id of the (logical) scope
     * @return variables whose parent scope id equals {@code scopeId}; empty collection if none
     */
    public Collection<Variable> getVariables(String scopeId) {
        return variables.values()
                .stream()
                .filter(v -> v.getParentScopeId().equals(scopeId))
                .collect(Collectors.toList());
    }
}
apache-2.0
NiteshKant/RxJava
src/test/java/io/reactivex/internal/operators/observable/ObservableDoOnSubscribeTest.java
4737
/**
 * Copyright (c) 2016-present, RxJava Contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */

package io.reactivex.internal.operators.observable;

import static org.junit.Assert.*;

import java.util.List;
import java.util.concurrent.atomic.*;

import org.junit.Test;

import io.reactivex.*;
import io.reactivex.disposables.*;
import io.reactivex.exceptions.TestException;
import io.reactivex.functions.Consumer;
import io.reactivex.plugins.RxJavaPlugins;

/**
 * Tests for {@code Observable.doOnSubscribe}: the callback must fire once per
 * subscription, compose across operators, and propagate a crash in the callback
 * as an onError to the downstream observer.
 */
public class ObservableDoOnSubscribeTest {

    @Test
    public void testDoOnSubscribe() throws Exception {
        final AtomicInteger count = new AtomicInteger();
        Observable<Integer> o = Observable.just(1).doOnSubscribe(new Consumer<Disposable>() {
            @Override
            public void accept(Disposable d) {
                count.incrementAndGet();
            }
        });

        // Each of the three independent subscriptions must trigger the callback once.
        o.subscribe();
        o.subscribe();
        o.subscribe();
        assertEquals(3, count.get());
    }

    @Test
    public void testDoOnSubscribe2() throws Exception {
        final AtomicInteger count = new AtomicInteger();
        // Two doOnSubscribe stages in the same chain: a single subscription passes
        // through both, so the shared counter is incremented twice.
        Observable<Integer> o = Observable.just(1).doOnSubscribe(new Consumer<Disposable>() {
            @Override
            public void accept(Disposable d) {
                count.incrementAndGet();
            }
        }).take(1).doOnSubscribe(new Consumer<Disposable>() {
            @Override
            public void accept(Disposable d) {
                count.incrementAndGet();
            }
        });

        o.subscribe();
        assertEquals(2, count.get());
    }

    @Test
    public void testDoOnUnSubscribeWorksWithRefCount() throws Exception {
        // onSubscribed: how many times the upstream source was actually subscribed.
        // countBefore: doOnSubscribe calls upstream of publish().refCount().
        // countAfter: doOnSubscribe calls downstream of publish().refCount().
        final AtomicInteger onSubscribed = new AtomicInteger();
        final AtomicInteger countBefore = new AtomicInteger();
        final AtomicInteger countAfter = new AtomicInteger();
        // Captures the upstream observer so the test can complete the source manually.
        final AtomicReference<Observer<? super Integer>> sref = new AtomicReference<Observer<? super Integer>>();

        Observable<Integer> o = Observable.unsafeCreate(new ObservableSource<Integer>() {
            @Override
            public void subscribe(Observer<? super Integer> observer) {
                observer.onSubscribe(Disposables.empty());
                onSubscribed.incrementAndGet();
                sref.set(observer);
            }
        }).doOnSubscribe(new Consumer<Disposable>() {
            @Override
            public void accept(Disposable d) {
                countBefore.incrementAndGet();
            }
        }).publish().refCount()
        .doOnSubscribe(new Consumer<Disposable>() {
            @Override
            public void accept(Disposable d) {
                countAfter.incrementAndGet();
            }
        });

        // refCount() shares one upstream connection among the three subscribers,
        // so upstream hooks fire once while the downstream hook fires per subscriber.
        o.subscribe();
        o.subscribe();
        o.subscribe();
        assertEquals(1, countBefore.get());
        assertEquals(1, onSubscribed.get());
        assertEquals(3, countAfter.get());
        sref.get().onComplete();

        // After completion the connection is gone; subscribing again reconnects once.
        o.subscribe();
        o.subscribe();
        o.subscribe();
        assertEquals(2, countBefore.get());
        assertEquals(2, onSubscribed.get());
        assertEquals(6, countAfter.get());
    }

    @Test
    public void onSubscribeCrash() {
        // Collect errors routed to the RxJava plugin error handler for inspection.
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            final Disposable bs = Disposables.empty();

            new Observable<Integer>() {
                @Override
                protected void subscribeActual(Observer<? super Integer> observer) {
                    observer.onSubscribe(bs);
                    observer.onError(new TestException("Second"));
                    observer.onComplete();
                }
            }
            .doOnSubscribe(new Consumer<Disposable>() {
                @Override
                public void accept(Disposable d) throws Exception {
                    throw new TestException("First");
                }
            })
            .test()
            .assertFailureAndMessage(TestException.class, "First");

            // The crash in doOnSubscribe must dispose the upstream...
            assertTrue(bs.isDisposed());

            // ...and the source's own onError("Second") becomes undeliverable.
            TestHelper.assertUndeliverable(errors, 0, TestException.class, "Second");
        } finally {
            RxJavaPlugins.reset();
        }
    }
}
apache-2.0
srkukarni/heron
integration_test/src/java/com/twitter/heron/integration_test/core/IntegrationTestSpout.java
8163
// Copyright 2016 Twitter. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.twitter.heron.integration_test.core; import java.io.IOException; import java.text.ParseException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; import com.twitter.heron.api.spout.IRichSpout; import com.twitter.heron.api.spout.ISpoutOutputCollector; import com.twitter.heron.api.spout.SpoutOutputCollector; import com.twitter.heron.api.topology.OutputFieldsDeclarer; import com.twitter.heron.api.topology.TopologyContext; import com.twitter.heron.api.tuple.Fields; import com.twitter.heron.api.tuple.Values; public class IntegrationTestSpout implements IRichSpout { private static final long serialVersionUID = 6068686695658877942L; private static final Logger LOG = Logger.getLogger(IntegrationTestSpout.class.getName()); static final Values TERMINAL_TUPLE = new Values(Constants.INTEGRATION_TEST_TERMINAL); private final IRichSpout delegateSpout; private final String topologyStartedStateUrl; private long tuplesToAck = 0; private SpoutOutputCollector spoutOutputCollector; private boolean hasSetStarted = false; private int maxExecutions; private Map<Object, List<Object>> pendingMessages; public IntegrationTestSpout(IRichSpout delegateSpout, int maxExecutions, String topologyStartedStateUrl) { assert maxExecutions > 0; 
this.delegateSpout = delegateSpout; this.maxExecutions = maxExecutions; this.topologyStartedStateUrl = topologyStartedStateUrl; this.pendingMessages = new HashMap<>(); } protected void resetMaxExecutions(int resetExecutions) { if (!doneEmitting() || !doneAcking()) { throw new RuntimeException( "Can not reset resetMaxExecutions while tuples are still bing emitted or acked"); } this.maxExecutions = resetExecutions; } @Override public void declareOutputFields(OutputFieldsDeclarer outputFieldsDeclarer) { outputFieldsDeclarer.declareStream(Constants.INTEGRATION_TEST_CONTROL_STREAM_ID, new Fields(Constants.INTEGRATION_TEST_TERMINAL)); delegateSpout.declareOutputFields(outputFieldsDeclarer); } @Override public Map<String, Object> getComponentConfiguration() { return delegateSpout.getComponentConfiguration(); } @Override public void close() { delegateSpout.close(); } @Override public void activate() { delegateSpout.activate(); } @Override public void deactivate() { delegateSpout.deactivate(); } @Override public void open(Map<String, Object> map, TopologyContext topologyContext, SpoutOutputCollector outputCollector) { // Here the spoutOutputCollector should be a default one // to emit tuples without adding MessageId this.spoutOutputCollector = outputCollector; delegateSpout.open(map, topologyContext, new SpoutOutputCollector(new IntegrationTestSpoutCollector(outputCollector))); } @Override public void nextTuple() { if (doneEmitting()) { return; } else if (!this.hasSetStarted) { setStateToStarted(); this.hasSetStarted = true; } maxExecutions--; LOG.fine("maxExecutions = " + maxExecutions); delegateSpout.nextTuple(); // We need a double check here rather than set the isDone == true in nextTuple() // Since it is possible before nextTuple we get all the acks and the topology is done // However, since the isDone is not set to true, we may not emit terminals; it will cause bug. 
if (doneEmitting()) { // This is needed if all the tuples have been emitted and acked // before maxExecutions becomes 0 emitTerminalIfNeeded(); LOG.fine("The topology is done."); } else { if (getPostEmitSleepTime() > 0) { try { getPostEmitSleepTimeUnit().sleep(getPostEmitSleepTime()); } catch (InterruptedException e) { LOG.log(Level.SEVERE, "Thread interrupted while trying to sleep post-emit", e); } } } } @Override public void ack(Object messageId) { tuplesToAck--; LOG.info("Received an ack with MessageId: " + messageId + " tuplesToAck=" + tuplesToAck); if (!isTestMessageId(messageId)) { delegateSpout.ack(messageId); } else { handleAckedMessage(messageId, pendingMessages.get(messageId)); } emitTerminalIfNeeded(); } protected void handleAckedMessage(Object messageId, List<Object> tuple) { pendingMessages.remove(messageId); } @Override public void fail(Object messageId) { LOG.info("Received a fail with MessageId: " + messageId); if (!isTestMessageId(messageId)) { delegateSpout.fail(messageId); } else { if (pendingMessages.containsKey(messageId)) { LOG.info("Re-emitting failed tuple with messageId " + messageId + ", tuple: " + pendingMessages.get(messageId)); spoutOutputCollector.emit(pendingMessages.get(messageId), messageId); } } emitTerminalIfNeeded(); } private static boolean isTestMessageId(Object messageId) { return ((String) messageId).startsWith(Constants.INTEGRATION_TEST_MOCK_MESSAGE_ID); } protected boolean doneEmitting() { return maxExecutions <= 0; } protected boolean doneAcking() { return tuplesToAck == 0; } protected TimeUnit getPostEmitSleepTimeUnit() { return TimeUnit.MILLISECONDS; } protected long getPostEmitSleepTime() { return 0; } protected void emitTerminalIfNeeded() { LOG.fine(String.format("doneEmitting = %s, tuplesToAck = %s", doneEmitting(), tuplesToAck)); if (doneEmitting() && doneAcking()) { LOG.info("Emitting terminals to downstream."); spoutOutputCollector.emit(Constants.INTEGRATION_TEST_CONTROL_STREAM_ID, TERMINAL_TUPLE); } // Else, do 
nothing } protected class IntegrationTestSpoutCollector implements ISpoutOutputCollector { private final ISpoutOutputCollector delegate; IntegrationTestSpoutCollector(ISpoutOutputCollector delegate) { this.delegate = delegate; } @Override public List<Integer> emit(String streamId, List<Object> tuple, Object messageId) { tuplesToAck++; LOG.info("Emitting tuple: " + tuple + ", tuplesToAck=" + tuplesToAck); return delegate.emit(streamId, tuple, getMessageId(tuple, messageId)); } @Override public void emitDirect(int taskId, String streamId, List<Object> tuple, Object messageId) { tuplesToAck++; LOG.info("Emitting tuple: " + tuple + ", tuplesToAck=" + tuplesToAck); delegate.emitDirect(taskId, streamId, tuple, getMessageId(tuple, messageId)); } @Override public void reportError(Throwable throwable) { delegate.reportError(throwable); } private Object getMessageId(List<Object> tuple, Object messageId) { Object id = messageId; if (id == null) { LOG.fine("Add MessageId for tuple: " + tuple); id = Constants.INTEGRATION_TEST_MOCK_MESSAGE_ID + "_" + UUID.randomUUID(); } pendingMessages.put(id, tuple); return id; } } private void setStateToStarted() { if (topologyStartedStateUrl == null) { return; } try { HttpUtils.httpJsonPost(topologyStartedStateUrl, "\"true\""); } catch (IOException | ParseException e) { throw new RuntimeException( "Failure posting topology_started state to " + topologyStartedStateUrl, e); } } }
apache-2.0
issi8131/konashi-android-sdk-master
konashi-android-sdk/src/main/java/com/uxxu/konashi/lib/KonashiUUID.java
7688
package com.uxxu.konashi.lib; import java.util.UUID; /** * konashiで使用するGATTのUUID * * @author monakaz, YUKAI Engineering * http://konashi.ux-xu.com * ======================================================================== * Copyright 2014 Yukai Engineering Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ public class KonashiUUID { //konashi2,koshain用ベースUUID,konashi(v1)使用時は要調査 public static final String BASE_UUID_STRING = "-03FB-40DA-98A7-B0DEF65C2D4B"; // Battery service UUID public static final UUID BATTERY_SERVICE_UUID = UUID.fromString("0000180F-0000-1000-8000-00805f9b34fb"); public static final UUID BATTERY_LEVEL_UUID = UUID.fromString("00002A19-0000-1000-8000-00805f9b34fb"); /** * 以下UUIDについてはkonashi2,koshainは"229Bxxxx",konashi(v1)は"0000xxxx" */ // konashi service UUID public static final UUID KONASHI_SERVICE_UUID = UUID.fromString("229BFF00" + BASE_UUID_STRING); // konashi characteristics public static final UUID PIO_SETTING_UUID = UUID.fromString("229B3000" + BASE_UUID_STRING); public static final UUID PIO_PULLUP_UUID = UUID.fromString("229B3001" + BASE_UUID_STRING); public static final UUID PIO_OUTPUT_UUID = UUID.fromString("229B3002" + BASE_UUID_STRING); public static final UUID PIO_INPUT_NOTIFICATION_UUID = UUID.fromString("229B3003" + BASE_UUID_STRING); public static final UUID PWM_CONFIG_UUID = UUID.fromString("229B3004" + BASE_UUID_STRING); public static final UUID PWM_PARAM_UUID = UUID.fromString("229B3005" + BASE_UUID_STRING); public static final UUID 
PWM_DUTY_UUID = UUID.fromString("229B3006" + BASE_UUID_STRING); public static final UUID ANALOG_DRIVE_UUID = UUID.fromString("229B3007" + BASE_UUID_STRING); public static final UUID ANALOG_READ0_UUID = UUID.fromString("229B3008" + BASE_UUID_STRING); public static final UUID ANALOG_READ1_UUID = UUID.fromString("229B3009" + BASE_UUID_STRING); public static final UUID ANALOG_READ2_UUID = UUID.fromString("229B300A" + BASE_UUID_STRING); public static final UUID I2C_CONFIG_UUID = UUID.fromString("229B300B" + BASE_UUID_STRING); public static final UUID I2C_START_STOP_UUID = UUID.fromString("229B300C" + BASE_UUID_STRING); public static final UUID I2C_WRITE_UUID = UUID.fromString("229B300D" + BASE_UUID_STRING); public static final UUID I2C_READ_PARAM_UUID = UUID.fromString("229B300E" + BASE_UUID_STRING); public static final UUID I2C_READ_UUID = UUID.fromString("229B300F" + BASE_UUID_STRING); public static final UUID UART_CONFIG_UUID = UUID.fromString("229B3010" + BASE_UUID_STRING); public static final UUID UART_BAUDRATE_UUID = UUID.fromString("229B3011" + BASE_UUID_STRING); public static final UUID UART_TX_UUID = UUID.fromString("229B3012" + BASE_UUID_STRING); public static final UUID UART_RX_NOTIFICATION_UUID = UUID.fromString("229B3013" + BASE_UUID_STRING); public static final UUID HARDWARE_RESET_UUID = UUID.fromString("229B3014" + BASE_UUID_STRING); public static final UUID HARDWARE_LOW_BAT_NOTIFICATION_UUID = UUID.fromString("229B3015" + BASE_UUID_STRING); // konashi characteristic configuration public static final UUID CLIENT_CHARACTERISTIC_CONFIG = UUID.fromString("00002902-0000-1000-8000-00805f9b34fb"); // Konashi v1 uuids(old codes) // // public static final String BASE_UUID_STRING = "-0000-1000-8000-00805F9B34FB"; // // // Battery service UUID // public static final UUID BATTERY_SERVICE_UUID = UUID.fromString("0000180F" + BASE_UUID_STRING); // public static final UUID BATTERY_LEVEL_UUID = UUID.fromString("00002A19" + BASE_UUID_STRING); // // // konashi service UUID 
// public static final UUID KONASHI_SERVICE_UUID = UUID.fromString("0000FF00" + BASE_UUID_STRING); // // // konashi characteristics // public static final UUID PIO_SETTING_UUID = UUID.fromString("00003000" + BASE_UUID_STRING); // public static final UUID PIO_PULLUP_UUID = UUID.fromString("00003001" + BASE_UUID_STRING); // public static final UUID PIO_OUTPUT_UUID = UUID.fromString("00003002" + BASE_UUID_STRING); // public static final UUID PIO_INPUT_NOTIFICATION_UUID = UUID.fromString("00003003" + BASE_UUID_STRING); // // public static final UUID PWM_CONFIG_UUID = UUID.fromString("00003004" + BASE_UUID_STRING); // public static final UUID PWM_PARAM_UUID = UUID.fromString("00003005" + BASE_UUID_STRING); // public static final UUID PWM_DUTY_UUID = UUID.fromString("00003006" + BASE_UUID_STRING); // // public static final UUID ANALOG_DRIVE_UUID = UUID.fromString("00003007" + BASE_UUID_STRING); // public static final UUID ANALOG_READ0_UUID = UUID.fromString("00003008" + BASE_UUID_STRING); // public static final UUID ANALOG_READ1_UUID = UUID.fromString("00003009" + BASE_UUID_STRING); // public static final UUID ANALOG_READ2_UUID = UUID.fromString("0000300A" + BASE_UUID_STRING); // // public static final UUID I2C_CONFIG_UUID = UUID.fromString("0000300B" + BASE_UUID_STRING); // public static final UUID I2C_START_STOP_UUID = UUID.fromString("0000300C" + BASE_UUID_STRING); // public static final UUID I2C_WRITE_UUID = UUID.fromString("0000300D" + BASE_UUID_STRING); // public static final UUID I2C_READ_PARAM_UUID = UUID.fromString("0000300E" + BASE_UUID_STRING); // public static final UUID I2C_READ_UUID = UUID.fromString("0000300F" + BASE_UUID_STRING); // // public static final UUID UART_CONFIG_UUID = UUID.fromString("00003010" + BASE_UUID_STRING); // public static final UUID UART_BAUDRATE_UUID = UUID.fromString("00003011" + BASE_UUID_STRING); // public static final UUID UART_TX_UUID = UUID.fromString("00003012" + BASE_UUID_STRING); // public static final UUID 
UART_RX_NOTIFICATION_UUID = UUID.fromString("00003013" + BASE_UUID_STRING); // // public static final UUID HARDWARE_RESET_UUID = UUID.fromString("00003014" + BASE_UUID_STRING); // public static final UUID HARDWARE_LOW_BAT_NOTIFICATION_UUID = UUID.fromString("00003015" + BASE_UUID_STRING); // // // konashi characteristic configuration // public static final UUID CLIENT_CHARACTERISTIC_CONFIG = UUID.fromString("00002902" + BASE_UUID_STRING); }
apache-2.0
apache/activemq-openwire
openwire-legacy/src/main/java/org/apache/activemq/openwire/codec/v3/MessageAckMarshaller.java
6525
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.openwire.codec.v3;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.activemq.openwire.codec.BooleanStream;
import org.apache.activemq.openwire.codec.OpenWireFormat;
import org.apache.activemq.openwire.commands.ConsumerId;
import org.apache.activemq.openwire.commands.DataStructure;
import org.apache.activemq.openwire.commands.MessageAck;
import org.apache.activemq.openwire.commands.MessageId;
import org.apache.activemq.openwire.commands.OpenWireDestination;
import org.apache.activemq.openwire.commands.TransactionId;

/**
 * OpenWire v3 marshaller for {@link MessageAck}.
 *
 * The field order is part of the wire protocol and must be identical in every
 * marshal/unmarshal method: destination, transactionId, consumerId (cached objects),
 * ackType (1 byte), firstMessageId, lastMessageId (nested objects), messageCount (4-byte int).
 * Do not reorder these calls.
 */
public class MessageAckMarshaller extends BaseCommandMarshaller {

    /**
     * Return the type of Data Structure we marshal
     *
     * @return short representation of the type data structure
     */
    @Override
    public byte getDataStructureType() {
        return MessageAck.DATA_STRUCTURE_TYPE;
    }

    /**
     * @return a new object instance
     */
    @Override
    public DataStructure createObject() {
        return new MessageAck();
    }

    /**
     * Un-marshal an object instance from the data input stream, using the
     * BooleanStream for null/presence flags (tight encoding).
     *
     * @param o
     *        the object to un-marshal
     * @param dataIn
     *        the data input stream to build the object from
     * @throws IOException
     */
    @Override
    public void tightUnmarshal(OpenWireFormat wireFormat, Object o, DataInput dataIn, BooleanStream bs) throws IOException {
        super.tightUnmarshal(wireFormat, o, dataIn, bs);

        MessageAck info = (MessageAck) o;
        // Read order mirrors tightMarshal2 exactly.
        info.setDestination((OpenWireDestination) tightUnmarsalCachedObject(wireFormat, dataIn, bs));
        info.setTransactionId((TransactionId) tightUnmarsalCachedObject(wireFormat, dataIn, bs));
        info.setConsumerId((ConsumerId) tightUnmarsalCachedObject(wireFormat, dataIn, bs));
        info.setAckType(dataIn.readByte());
        info.setFirstMessageId((MessageId) tightUnmarsalNestedObject(wireFormat, dataIn, bs));
        info.setLastMessageId((MessageId) tightUnmarsalNestedObject(wireFormat, dataIn, bs));
        info.setMessageCount(dataIn.readInt());
    }

    /**
     * Write the booleans that this object uses to a BooleanStream
     * (pass 1 of tight encoding) and return the fixed byte size of this
     * object's non-boolean payload: 1 (ackType byte) + 4 (messageCount int) = 5.
     */
    @Override
    public int tightMarshal1(OpenWireFormat wireFormat, Object o, BooleanStream bs) throws IOException {
        MessageAck info = (MessageAck) o;

        int rc = super.tightMarshal1(wireFormat, o, bs);
        rc += tightMarshalCachedObject1(wireFormat, info.getDestination(), bs);
        rc += tightMarshalCachedObject1(wireFormat, info.getTransactionId(), bs);
        rc += tightMarshalCachedObject1(wireFormat, info.getConsumerId(), bs);
        rc += tightMarshalNestedObject1(wireFormat, info.getFirstMessageId(), bs);
        rc += tightMarshalNestedObject1(wireFormat, info.getLastMessageId(), bs);

        return rc + 5;
    }

    /**
     * Write a object instance to data output stream (pass 2 of tight encoding;
     * must consume the BooleanStream in the same order tightMarshal1 filled it)
     *
     * @param o
     *        the instance to be marshaled
     * @param dataOut
     *        the output stream
     * @throws IOException
     *         thrown if an error occurs
     */
    @Override
    public void tightMarshal2(OpenWireFormat wireFormat, Object o, DataOutput dataOut, BooleanStream bs) throws IOException {
        super.tightMarshal2(wireFormat, o, dataOut, bs);

        MessageAck info = (MessageAck) o;
        tightMarshalCachedObject2(wireFormat, info.getDestination(), dataOut, bs);
        tightMarshalCachedObject2(wireFormat, info.getTransactionId(), dataOut, bs);
        tightMarshalCachedObject2(wireFormat, info.getConsumerId(), dataOut, bs);
        dataOut.writeByte(info.getAckType());
        tightMarshalNestedObject2(wireFormat, info.getFirstMessageId(), dataOut, bs);
        tightMarshalNestedObject2(wireFormat, info.getLastMessageId(), dataOut, bs);
        dataOut.writeInt(info.getMessageCount());
    }

    /**
     * Un-marshal an object instance from the data input stream
     * (loose encoding: no BooleanStream, flags are inline)
     *
     * @param o
     *        the object to un-marshal
     * @param dataIn
     *        the data input stream to build the object from
     * @throws IOException
     */
    @Override
    public void looseUnmarshal(OpenWireFormat wireFormat, Object o, DataInput dataIn) throws IOException {
        super.looseUnmarshal(wireFormat, o, dataIn);

        MessageAck info = (MessageAck) o;
        // Read order mirrors looseMarshal exactly.
        info.setDestination((OpenWireDestination) looseUnmarsalCachedObject(wireFormat, dataIn));
        info.setTransactionId((TransactionId) looseUnmarsalCachedObject(wireFormat, dataIn));
        info.setConsumerId((ConsumerId) looseUnmarsalCachedObject(wireFormat, dataIn));
        info.setAckType(dataIn.readByte());
        info.setFirstMessageId((MessageId) looseUnmarsalNestedObject(wireFormat, dataIn));
        info.setLastMessageId((MessageId) looseUnmarsalNestedObject(wireFormat, dataIn));
        info.setMessageCount(dataIn.readInt());
    }

    /**
     * Write the object to the output stream using loose encoding
     * (single pass, no BooleanStream).
     */
    @Override
    public void looseMarshal(OpenWireFormat wireFormat, Object o, DataOutput dataOut) throws IOException {
        MessageAck info = (MessageAck) o;

        super.looseMarshal(wireFormat, o, dataOut);
        looseMarshalCachedObject(wireFormat, info.getDestination(), dataOut);
        looseMarshalCachedObject(wireFormat, info.getTransactionId(), dataOut);
        looseMarshalCachedObject(wireFormat, info.getConsumerId(), dataOut);
        dataOut.writeByte(info.getAckType());
        looseMarshalNestedObject(wireFormat, info.getFirstMessageId(), dataOut);
        looseMarshalNestedObject(wireFormat, info.getLastMessageId(), dataOut);
        dataOut.writeInt(info.getMessageCount());
    }
}
apache-2.0
Fabryprog/camel
core/camel-base/src/main/java/org/apache/camel/processor/LoopProcessor.java
6648
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.processor;

import org.apache.camel.AsyncCallback;
import org.apache.camel.Exchange;
import org.apache.camel.Expression;
import org.apache.camel.NoTypeConversionAvailableException;
import org.apache.camel.Predicate;
import org.apache.camel.Processor;
import org.apache.camel.Traceable;
import org.apache.camel.spi.IdAware;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.support.processor.DelegateAsyncProcessor;

import static org.apache.camel.processor.PipelineHelper.continueProcessing;

/**
 * The processor which sends messages in a loop.
 *
 * Two modes are supported (exactly one of expression/predicate is set):
 * a counted loop driven by {@code expression} (evaluated once up front to an
 * Integer count), or a do-while loop driven by {@code predicate} (re-evaluated
 * before every iteration). Iterations are scheduled on the reactive executor
 * rather than executed in a Java loop, so each pass is a separately scheduled
 * {@link LoopState#run()} and deep loops cannot blow the stack.
 */
public class LoopProcessor extends DelegateAsyncProcessor implements Traceable, IdAware {

    private String id;
    private final Expression expression;
    private final Predicate predicate;
    // when true each iteration works on a fresh copy of the original exchange
    private final boolean copy;

    public LoopProcessor(Processor processor, Expression expression, Predicate predicate, boolean copy) {
        super(processor);
        this.expression = expression;
        this.predicate = predicate;
        this.copy = copy;
    }

    @Override
    public boolean process(Exchange exchange, AsyncCallback callback) {
        try {
            LoopState state = new LoopState(exchange, callback);

            // transacted exchanges must stay on the calling thread; others go to the main queue
            if (exchange.isTransacted()) {
                exchange.getContext().getReactiveExecutor().scheduleSync(state);
            } else {
                exchange.getContext().getReactiveExecutor().scheduleMain(state);
            }
            // always asynchronous: completion is signalled via the callback from LoopState
            return false;
        } catch (Exception e) {
            // e.g. loop-count expression failed to evaluate/convert
            exchange.setException(e);
            callback.done(true);
            return true;
        }
    }

    /**
     * Class holding state for loop processing.
     * One instance per inbound exchange; run() performs a single iteration and
     * re-schedules itself until the loop condition fails, then invokes the callback.
     */
    class LoopState implements Runnable {

        final Exchange exchange;
        final AsyncCallback callback;
        Exchange current;
        int index;
        int count;

        public LoopState(Exchange exchange, AsyncCallback callback) throws NoTypeConversionAvailableException {
            this.exchange = exchange;
            this.callback = callback;
            this.current = exchange;

            // evaluate expression / predicate
            if (expression != null) {
                // Intermediate conversion to String is needed when direct conversion to Integer is not available
                // but evaluation result is a textual representation of a numeric value.
                String text = expression.evaluate(exchange, String.class);
                count = ExchangeHelper.convertToMandatoryType(exchange, Integer.class, text);
                exchange.setProperty(Exchange.LOOP_SIZE, count);
            }
        }

        @Override
        public void run() {
            try {
                // check for error if so we should break out
                boolean cont = continueProcessing(current, "so breaking out of loop", log);
                // predicate mode: re-check before every pass; expression mode: bounded by count
                boolean doWhile = predicate == null || predicate.matches(current);
                boolean doLoop = expression == null || index < count;

                // iterate
                if (cont && doWhile && doLoop) {
                    // and prepare for next iteration
                    current = prepareExchange(exchange, index);

                    // set current index as property
                    log.debug("LoopProcessor: iteration #{}", index);
                    current.setProperty(Exchange.LOOP_INDEX, index);

                    processor.process(current, doneSync -> {
                        // increment counter after done, then re-schedule this state for the next pass
                        index++;
                        exchange.getContext().getReactiveExecutor().schedule(this);
                    });
                } else {
                    // we are done so prepare the result: copy the last iteration's outcome
                    // back onto the original exchange before signalling completion
                    ExchangeHelper.copyResults(exchange, current);
                    log.trace("Processing complete for exchangeId: {} >>> {}", exchange.getExchangeId(), exchange);
                    callback.done(false);
                }
            } catch (Exception e) {
                log.trace("Processing failed for exchangeId: {} >>> {}", exchange.getExchangeId(), e.getMessage());
                exchange.setException(e);
                callback.done(false);
            }
        }

        public String toString() {
            return "LoopState[" + exchange.getExchangeId() + "]";
        }
    }

    /**
     * Prepares the exchange for the next iteration
     *
     * @param exchange the exchange
     * @param index the index of the next iteration
     * @return the exchange to use
     */
    protected Exchange prepareExchange(Exchange exchange, int index) {
        if (copy) {
            // use a copy but let it reuse the same exchange id so it appear as one exchange
            // use the original exchange rather than the looping exchange (esp. with the async routing engine)
            return ExchangeHelper.createCopy(exchange, true);
        } else {
            // reuse the same exchange: move OUT of the previous pass into IN for the next
            ExchangeHelper.prepareOutToIn(exchange);
            return exchange;
        }
    }

    public Expression getExpression() {
        return expression;
    }

    public Predicate getPredicate() {
        return predicate;
    }

    public boolean isCopy() {
        return copy;
    }

    public String getTraceLabel() {
        if (predicate != null) {
            return "loopWhile[" + predicate + "]";
        } else {
            return "loop[" + expression + "]";
        }
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    @Override
    public String toString() {
        if (predicate != null) {
            return "Loop[while: " + predicate + " do: " + getProcessor() + "]";
        } else {
            return "Loop[for: " + expression + " times do: " + getProcessor() + "]";
        }
    }
}
apache-2.0
diydyq/velocity-engine
velocity-engine-core/src/test/java/org/apache/velocity/test/IncludeEventHandlingTestCase.java
7297
package org.apache.velocity.test;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;

import junit.framework.Test;
import junit.framework.TestSuite;

import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.Velocity;
import org.apache.velocity.app.event.EventCartridge;
import org.apache.velocity.app.event.IncludeEventHandler;
import org.apache.velocity.context.Context;
import org.apache.velocity.runtime.RuntimeServices;
import org.apache.velocity.runtime.RuntimeSingleton;
import org.apache.velocity.test.misc.TestLogChute;
import org.apache.velocity.util.RuntimeServicesAware;

/**
 * Tests event handling for #include/#parse: the test class itself is registered
 * as the IncludeEventHandler, and a mutable mode field switches its behavior
 * between pass-through, relative-path rewriting, and blocking. Rendered output
 * is written to result files and compared against expected files.
 *
 * @author <a href="mailto:geirm@optonline.net">Geir Magnusson Jr.</a>
 * @version $Id$
 */
public class IncludeEventHandlingTestCase extends BaseTestCase
        implements IncludeEventHandler,RuntimeServicesAware
{
    /**
     * VTL file extension.
     */
    private static final String TMPL_FILE_EXT = "vm";

    /**
     * Comparison file extension.
     */
    private static final String CMP_FILE_EXT = "cmp";

    /**
     * Result file extension.
     */
    private static final String RESULT_FILE_EXT = "res";

    /**
     * Path for templates. This property will override the
     * value in the default velocity properties file.
     */
    private final static String FILE_RESOURCE_LOADER_PATH = TEST_COMPARE_DIR + "/includeevent";

    /**
     * Results relative to the build directory.
     */
    private static final String RESULTS_DIR = TEST_RESULT_DIR + "/includeevent";

    /**
     * Expected-output files relative to the build directory.
     */
    private static final String COMPARE_DIR = TEST_COMPARE_DIR + "/includeevent/compare";

    // The three handler behaviors exercised by the test, selected via EventHandlerBehavior.
    private static final int PASS_THROUGH=0;
    private static final int RELATIVE_PATH=1;
    private static final int BLOCK=2;

    // Current behavior of includeEvent(); mutated between template merges.
    private int EventHandlerBehavior = PASS_THROUGH;

    /**
     * Default constructor.
     */
    public IncludeEventHandlingTestCase(String name)
    {
        super(name);
    }

    // Resets the Velocity singleton and points the file resource loader at the test templates.
    public void setUp()
            throws Exception
    {
        assureResultsDirectoryExists(RESULTS_DIR);

        Velocity.reset();
        Velocity.addProperty(
                Velocity.FILE_RESOURCE_LOADER_PATH, FILE_RESOURCE_LOADER_PATH);

        Velocity.setProperty(
                Velocity.RUNTIME_LOG_LOGSYSTEM_CLASS, TestLogChute.class.getName());

        Velocity.init();
    }

    public static Test suite ()
    {
        return new TestSuite(IncludeEventHandlingTestCase.class);
    }

    /**
     * Runs the test: merges three templates, one per handler behavior, and
     * compares each rendered result file against its expected counterpart.
     */
    public void testIncludeEventHandling ()
            throws Exception
    {
        Template template1 = RuntimeSingleton.getTemplate(
            getFileName(null, "test1", TMPL_FILE_EXT));

        Template template2 = RuntimeSingleton.getTemplate(
            getFileName(null, "subdir/test2", TMPL_FILE_EXT));

        Template template3 = RuntimeSingleton.getTemplate(
            getFileName(null, "test3", TMPL_FILE_EXT));

        FileOutputStream fos1 =
            new FileOutputStream (
                getFileName(RESULTS_DIR, "test1", RESULT_FILE_EXT));

        FileOutputStream fos2 =
            new FileOutputStream (
                getFileName(RESULTS_DIR, "test2", RESULT_FILE_EXT));

        FileOutputStream fos3 =
            new FileOutputStream (
                getFileName(RESULTS_DIR, "test3", RESULT_FILE_EXT));

        Writer writer1 = new BufferedWriter(new OutputStreamWriter(fos1));
        Writer writer2 = new BufferedWriter(new OutputStreamWriter(fos2));
        Writer writer3 = new BufferedWriter(new OutputStreamWriter(fos3));

        /*
         *  lets make a Context and add the event cartridge
         */
        Context context = new VelocityContext();

        /*
         *  Now make an event cartridge, register the
         *  input event handler and attach it to the
         *  Context
         */
        EventCartridge ec = new EventCartridge();
        ec.addEventHandler(this);
        ec.attachToContext( context );

        // BEHAVIOR A: pass through #input and #parse with no change
        EventHandlerBehavior = PASS_THROUGH;

        template1.merge(context, writer1);
        writer1.flush();
        writer1.close();

        // BEHAVIOR B: pass through #input and #parse with using a relative path
        EventHandlerBehavior = RELATIVE_PATH;

        template2.merge(context, writer2);
        writer2.flush();
        writer2.close();

        // BEHAVIOR C: refuse to pass through #input and #parse
        EventHandlerBehavior = BLOCK;

        template3.merge(context, writer3);
        writer3.flush();
        writer3.close();

        if (!isMatch(RESULTS_DIR, COMPARE_DIR, "test1",
                        RESULT_FILE_EXT, CMP_FILE_EXT) ||
            !isMatch(RESULTS_DIR, COMPARE_DIR, "test2",
                        RESULT_FILE_EXT, CMP_FILE_EXT) ||
            !isMatch(RESULTS_DIR, COMPARE_DIR, "test3",
                        RESULT_FILE_EXT, CMP_FILE_EXT)
                )
        {
            fail("Output incorrect.");
        }
    }

    // RuntimeServicesAware hook; this test has no use for the runtime services.
    public void setRuntimeServices( RuntimeServices rs )
    {
    }

    /**
     * Sample handler with different behaviors for the different tests.
     * Returns the resource path to actually include, or null to block the include.
     */
    public String includeEvent( String includeResourcePath,
                                String currentResourcePath,
                                String directiveName)
    {
        if (EventHandlerBehavior == PASS_THROUGH)
            return includeResourcePath;

        // treat as relative path
        else if (EventHandlerBehavior == RELATIVE_PATH)
        {
            // if the resource name starts with a slash, it's not a relative path
            if (includeResourcePath.startsWith("/") || includeResourcePath.startsWith("\\") )
            {
                return includeResourcePath;
            }

            int lastslashpos = Math.max(
                    currentResourcePath.lastIndexOf("/"),
                    currentResourcePath.lastIndexOf("\\")
                    );

            // root of resource tree
            if ( (lastslashpos == -1))
                return includeResourcePath;

            // prepend path to the input path
            else
                return currentResourcePath.substring(0,lastslashpos) + "/" + includeResourcePath;

        }

        else if (EventHandlerBehavior == BLOCK)
            return null;

        // should never happen
        else
            return null;

    }
}
apache-2.0
darkforestzero/buck
src/com/facebook/buck/android/AndroidResource.java
14462
/* * Copyright 2012-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.android; import static com.facebook.buck.rules.BuildableProperties.Kind.ANDROID; import static com.facebook.buck.rules.BuildableProperties.Kind.LIBRARY; import com.facebook.buck.android.aapt.MiniAapt; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargets; import com.facebook.buck.rules.AbstractBuildRule; import com.facebook.buck.rules.AddToRuleKey; import com.facebook.buck.rules.BuildContext; import com.facebook.buck.rules.BuildOutputInitializer; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildableContext; import com.facebook.buck.rules.BuildableProperties; import com.facebook.buck.rules.ExplicitBuildTargetSourcePath; import com.facebook.buck.rules.InitializableFromDisk; import com.facebook.buck.rules.OnDiskBuildInfo; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathRuleFinder; import com.facebook.buck.rules.keys.SupportsInputBasedRuleKey; import com.facebook.buck.step.Step; import com.facebook.buck.step.fs.MakeCleanDirectoryStep; import com.facebook.buck.step.fs.TouchStep; import com.facebook.buck.step.fs.WriteFileStep; import com.facebook.buck.util.HumanReadableException; import com.facebook.buck.util.MoreCollectors; import com.facebook.buck.util.MoreMaps; import com.google.common.annotations.VisibleForTesting; 
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Ordering;

import java.nio.file.Path;
import java.util.concurrent.atomic.AtomicReference;

import javax.annotation.Nullable;

/**
 * An object that represents the resources of an android library.
 * <p>
 * Suppose this were a rule defined in <code>src/com/facebook/feed/BUCK</code>:
 * <pre>
 * android_resources(
 *   name = 'res',
 *   res = 'res',
 *   assets = 'buck-assets',
 *   deps = [
 *     '//first-party/orca/lib-ui:lib-ui',
 *   ],
 * )
 * </pre>
 */
public class AndroidResource extends AbstractBuildRule
    implements
    AndroidPackageable,
    HasAndroidResourceDeps,
    InitializableFromDisk<String>,
    SupportsInputBasedRuleKey {

  private static final BuildableProperties PROPERTIES = new BuildableProperties(ANDROID, LIBRARY);

  // Key under which the R.java package parsed from AndroidManifest.xml is recorded in the
  // buildable context metadata (see getBuildSteps / ExtractFromAndroidManifestStep).
  @VisibleForTesting
  static final String METADATA_KEY_FOR_R_DOT_JAVA_PACKAGE = "METADATA_KEY_FOR_R_DOT_JAVA_PACKAGE";

  // Directory of Android resources ("res/"); may be absent for asset-only rules.
  @AddToRuleKey
  @Nullable
  private final SourcePath res;

  // Flattened res entries keyed by path string; only read by the rule-key machinery
  // via @AddToRuleKey, hence the suppressed "unused" warning.
  @SuppressWarnings("PMD.UnusedPrivateField")
  @AddToRuleKey
  private final ImmutableSortedMap<String, SourcePath> resSrcs;

  // Directory of Android assets; may be absent.
  @AddToRuleKey
  @Nullable
  private final SourcePath assets;

  // Flattened asset entries keyed by path string; rule-key input only (same as resSrcs).
  @SuppressWarnings("PMD.UnusedPrivateField")
  @AddToRuleKey
  private final ImmutableSortedMap<String, SourcePath> assetsSrcs;

  // Output locations under buck-out: the text-symbols dir, the R.txt inside it, and the
  // file recording the resolved R.java package.
  private final Path pathToTextSymbolsDir;
  private final Path pathToTextSymbolsFile;
  private final Path pathToRDotJavaPackageFile;

  // AndroidManifest.xml, used to parse the package when no explicit 'package' arg is given.
  @AddToRuleKey
  @Nullable
  private final SourcePath manifestFile;

  // Lazily computed R.txt files of all resource deps; consumed by MiniAapt.
  @AddToRuleKey
  private final Supplier<ImmutableSortedSet<? extends SourcePath>> symbolsOfDeps;

  @AddToRuleKey
  private final boolean hasWhitelistedStrings;

  @AddToRuleKey
  private final boolean resourceUnion;

  private final boolean isGrayscaleImageProcessingEnabled;

  private final ImmutableSortedSet<BuildRule> deps;

  private final BuildOutputInitializer<String> buildOutputInitializer;

  /**
   * This is the original {@code package} argument passed to this rule.
   */
  @AddToRuleKey
  @Nullable
  private final String rDotJavaPackageArgument;

  /**
   * Supplier that returns the package for the Java class generated for the resources in
   * {@link #res}, if any. The value for this supplier is determined, as follows:
   * <ul>
   *   <li>If the user specified a {@code package} argument, the supplier will return that value.
   *   <li>Failing that, when the rule is built, it will parse the package from the file specified
   *       by the {@code manifest} so that it can be returned by this supplier. (Note this also
   *       needs to work correctly if the rule is initialized from disk.)
   *   <li>In all other cases (e.g., both {@code package} and {@code manifest} are unspecified), the
   *       behavior is undefined.
   * </ul>
   */
  private final Supplier<String> rDotJavaPackageSupplier;

  // Holds the resolved package; pre-seeded with the explicit argument (may be null) and
  // later set by initializeFromDisk once the manifest has been parsed.
  private final AtomicReference<String> rDotJavaPackage;

  /**
   * Primary constructor. Validates that a package can eventually be determined (either an
   * explicit {@code package} argument or a manifest must accompany {@code res}) and derives
   * the output paths for the text-symbols files from the build target.
   *
   * @throws HumanReadableException if {@code res} is given but neither {@code package} nor
   *         {@code manifest} is.
   */
  public AndroidResource(
      BuildRuleParams buildRuleParams,
      SourcePathRuleFinder ruleFinder,
      final ImmutableSortedSet<BuildRule> deps,
      @Nullable SourcePath res,
      ImmutableSortedMap<Path, SourcePath> resSrcs,
      @Nullable String rDotJavaPackageArgument,
      @Nullable SourcePath assets,
      ImmutableSortedMap<Path, SourcePath> assetsSrcs,
      @Nullable SourcePath manifestFile,
      Supplier<ImmutableSortedSet<? extends SourcePath>> symbolFilesFromDeps,
      boolean hasWhitelistedStrings,
      boolean resourceUnion,
      boolean isGrayscaleImageProcessingEnabled) {
    // The rules producing the deps' symbol files become extra deps of this rule.
    super(
        buildRuleParams.copyAppendingExtraDeps(
            Suppliers.compose(ruleFinder::filterBuildRuleInputs, symbolFilesFromDeps)));
    if (res != null && rDotJavaPackageArgument == null && manifestFile == null) {
      throw new HumanReadableException(
          "When the 'res' is specified for android_resource() %s, at least one of 'package' or "
              + "'manifest' must be specified.",
          getBuildTarget());
    }

    this.res = res;
    // Keys are converted Path -> String so the maps are rule-key serializable.
    this.resSrcs = MoreMaps.transformKeysAndSort(resSrcs, Path::toString);
    this.assets = assets;
    this.assetsSrcs = MoreMaps.transformKeysAndSort(assetsSrcs, Path::toString);
    this.manifestFile = manifestFile;
    this.symbolsOfDeps = symbolFilesFromDeps;
    this.hasWhitelistedStrings = hasWhitelistedStrings;
    this.resourceUnion = resourceUnion;

    BuildTarget buildTarget = buildRuleParams.getBuildTarget();
    this.pathToTextSymbolsDir =
        BuildTargets.getGenPath(getProjectFilesystem(), buildTarget, "__%s_text_symbols__");
    this.pathToTextSymbolsFile = pathToTextSymbolsDir.resolve("R.txt");
    this.pathToRDotJavaPackageFile = pathToTextSymbolsDir.resolve("RDotJavaPackage.txt");

    this.deps = deps;
    this.buildOutputInitializer = new BuildOutputInitializer<>(buildTarget, this);

    this.rDotJavaPackageArgument = rDotJavaPackageArgument;
    this.rDotJavaPackage = new AtomicReference<>(rDotJavaPackageArgument);

    // Fails loudly if the package is requested before it has been resolved (see class javadoc
    // of rDotJavaPackageSupplier for when a value becomes available).
    this.rDotJavaPackageSupplier = () -> {
      String rDotJavaPackage1 = AndroidResource.this.rDotJavaPackage.get();
      if (rDotJavaPackage1 != null) {
        return rDotJavaPackage1;
      } else {
        throw new RuntimeException(
            "rDotJavaPackage for " + AndroidResource.this.getBuildTarget().toString()
                + " was requested before it was made available.");
      }
    };
    this.isGrayscaleImageProcessingEnabled = isGrayscaleImageProcessingEnabled;
  }

  /**
   * Convenience constructor with {@code resourceUnion} and grayscale image processing disabled.
   */
  public AndroidResource(
      final BuildRuleParams buildRuleParams,
      SourcePathRuleFinder ruleFinder,
      final ImmutableSortedSet<BuildRule> deps,
      @Nullable SourcePath res,
      ImmutableSortedMap<Path, SourcePath> resSrcs,
      @Nullable String rDotJavaPackageArgument,
      @Nullable SourcePath assets,
      ImmutableSortedMap<Path, SourcePath> assetsSrcs,
      @Nullable SourcePath manifestFile,
      boolean hasWhitelistedStrings) {
    this(
        buildRuleParams,
        ruleFinder,
        deps,
        res,
        resSrcs,
        rDotJavaPackageArgument,
        assets,
        assetsSrcs,
        manifestFile,
        hasWhitelistedStrings,
        /* resourceUnion */ false,
        /* isGrayscaleImageProcessingEnabled */ false);
  }

  /**
   * Convenience constructor that derives the deps' symbol files from the rule's declared deps:
   * every dep that is a {@link HasAndroidResourceDeps} with a non-null {@code res} contributes
   * its text-symbols file.
   */
  public AndroidResource(
      final BuildRuleParams buildRuleParams,
      SourcePathRuleFinder ruleFinder,
      final ImmutableSortedSet<BuildRule> deps,
      @Nullable SourcePath res,
      ImmutableSortedMap<Path, SourcePath> resSrcs,
      @Nullable String rDotJavaPackageArgument,
      @Nullable SourcePath assets,
      ImmutableSortedMap<Path, SourcePath> assetsSrcs,
      @Nullable SourcePath manifestFile,
      boolean hasWhitelistedStrings,
      boolean resourceUnion,
      boolean isGrayscaleImageProcessingEnabled) {
    this(
        buildRuleParams,
        ruleFinder,
        deps,
        res,
        resSrcs,
        rDotJavaPackageArgument,
        assets,
        assetsSrcs,
        manifestFile,
        () -> FluentIterable.from(buildRuleParams.getDeps())
            .filter(HasAndroidResourceDeps.class)
            .filter(input -> input.getRes() != null)
            .transform(HasAndroidResourceDeps::getPathToTextSymbolsFile)
            .toSortedSet(Ordering.natural()),
        hasWhitelistedStrings,
        resourceUnion,
        isGrayscaleImageProcessingEnabled);
  }

  @Override
  @Nullable
  public SourcePath getRes() {
    return res;
  }

  @Override
  @Nullable
  public SourcePath getAssets() {
    return assets;
  }

  @Nullable
  public SourcePath getManifestFile() {
    return manifestFile;
  }

  /**
   * Builds the text-symbols (R.txt) and R.java-package files for this rule:
   * <ol>
   *   <li>Always (re)creates the text-symbols output directory.
   *   <li>If there is no {@code res}, records an empty R.txt plus the package argument (or "")
   *       and stops.
   *   <li>Otherwise writes the package file — either from the explicit argument or by
   *       extracting it from AndroidManifest.xml — and runs MiniAapt over the resources with
   *       the deps' symbol files.
   * </ol>
   */
  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context,
      final BuildableContext buildableContext) {
    buildableContext.recordArtifact(Preconditions.checkNotNull(pathToTextSymbolsFile));
    buildableContext.recordArtifact(Preconditions.checkNotNull(pathToRDotJavaPackageFile));

    ImmutableList.Builder<Step> steps = ImmutableList.builder();
    steps.add(
        new MakeCleanDirectoryStep(
            getProjectFilesystem(),
            Preconditions.checkNotNull(pathToTextSymbolsDir)));
    if (getRes() == null) {
      // Asset-only rule: produce empty outputs so downstream consumers still find the files.
      return steps
          .add(new TouchStep(getProjectFilesystem(), pathToTextSymbolsFile))
          .add(new WriteFileStep(
              getProjectFilesystem(),
              rDotJavaPackageArgument == null ? "" : rDotJavaPackageArgument,
              pathToRDotJavaPackageFile,
              false /* executable */))
          .build();
    }

    // If the 'package' was not specified for this android_resource(), then attempt to parse it
    // from the AndroidManifest.xml.
    if (rDotJavaPackageArgument == null) {
      Preconditions.checkNotNull(
          manifestFile,
          "manifestFile cannot be null when res is non-null and rDotJavaPackageArgument is "
              + "null. This should already be enforced by the constructor.");
      steps.add(
          new ExtractFromAndroidManifestStep(
              context.getSourcePathResolver().getAbsolutePath(manifestFile),
              getProjectFilesystem(),
              buildableContext,
              METADATA_KEY_FOR_R_DOT_JAVA_PACKAGE,
              Preconditions.checkNotNull(pathToRDotJavaPackageFile)));
    } else {
      steps.add(new WriteFileStep(
          getProjectFilesystem(),
          rDotJavaPackageArgument,
          pathToRDotJavaPackageFile,
          false /* executable */));
    }

    // Resolve the deps' R.txt files to absolute paths for MiniAapt.
    ImmutableSet<Path> pathsToSymbolsOfDeps = symbolsOfDeps.get().stream()
        .map(context.getSourcePathResolver()::getAbsolutePath)
        .collect(MoreCollectors.toImmutableSet());

    steps.add(
        new MiniAapt(
            context.getSourcePathResolver(),
            getProjectFilesystem(),
            Preconditions.checkNotNull(res),
            Preconditions.checkNotNull(pathToTextSymbolsFile),
            pathsToSymbolsOfDeps,
            resourceUnion,
            isGrayscaleImageProcessingEnabled));
    return steps.build();
  }

  @Override
  @Nullable
  public SourcePath getSourcePathToOutput() {
    return new ExplicitBuildTargetSourcePath(getBuildTarget(), pathToTextSymbolsDir);
  }

  @Override
  public SourcePath getPathToTextSymbolsFile() {
    return new ExplicitBuildTargetSourcePath(getBuildTarget(), pathToTextSymbolsFile);
  }

  @Override
  public SourcePath getPathToRDotJavaPackageFile() {
    return new ExplicitBuildTargetSourcePath(getBuildTarget(), pathToRDotJavaPackageFile);
  }

  /**
   * Returns the resolved R.java package.
   *
   * @throws RuntimeException if the package has not been made available yet (see
   *         {@link #rDotJavaPackageSupplier}).
   */
  @Override
  public String getRDotJavaPackage() {
    String rDotJavaPackage = rDotJavaPackageSupplier.get();
    if (rDotJavaPackage == null) {
      throw new RuntimeException("No package for " + getBuildTarget());
    }
    return rDotJavaPackage;
  }

  @Override
  public BuildableProperties getProperties() {
    return PROPERTIES;
  }

  /**
   * Reads the R.java package back from the on-disk output, verifying it against the explicit
   * {@code package} argument when one was given, and publishes it via {@link #rDotJavaPackage}.
   */
  @Override
  public String initializeFromDisk(OnDiskBuildInfo onDiskBuildInfo) {
    String rDotJavaPackageFromFile =
        getProjectFilesystem().readFirstLine(pathToRDotJavaPackageFile).get();
    if (rDotJavaPackageArgument != null &&
        !rDotJavaPackageFromFile.equals(rDotJavaPackageArgument)) {
      throw new RuntimeException(String.format(
          "%s contains incorrect rDotJavaPackage (%s!=%s)",
          pathToRDotJavaPackageFile,
          rDotJavaPackageFromFile,
          rDotJavaPackageArgument));
    }
    rDotJavaPackage.set(rDotJavaPackageFromFile);
    return rDotJavaPackageFromFile;
  }

  @Override
  public BuildOutputInitializer<String> getBuildOutputInitializer() {
    return buildOutputInitializer;
  }

  @Override
  public Iterable<AndroidPackageable> getRequiredPackageables() {
    return AndroidPackageableCollector.getPackageableRules(deps);
  }

  /**
   * Registers this rule's res and assets directories with the packageable collector.
   * Whitelisted-string resources go through a dedicated collector channel.
   */
  @Override
  public void addToCollector(AndroidPackageableCollector collector) {
    if (res != null) {
      if (hasWhitelistedStrings) {
        collector.addStringWhitelistedResourceDirectory(getBuildTarget(), res);
      } else {
        collector.addResourceDirectory(getBuildTarget(), res);
      }
    }

    if (assets != null) {
      collector.addAssetsDirectory(getBuildTarget(), assets);
    }
  }
}
apache-2.0
marvinmarnold/NetCipher
libnetcipher/src/info/guardianproject/netcipher/proxy/OrbotHelper.java
7683
package info.guardianproject.netcipher.proxy; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.net.Uri; import android.text.TextUtils; import android.util.Log; import java.util.List; public class OrbotHelper implements ProxyHelper { private final static int REQUEST_CODE_STATUS = 100; public final static String ORBOT_PACKAGE_NAME = "org.torproject.android"; public final static String ORBOT_MARKET_URI = "market://details?id=" + ORBOT_PACKAGE_NAME; public final static String ORBOT_FDROID_URI = "https://f-droid.org/repository/browse/?fdid=" + ORBOT_PACKAGE_NAME; public final static String ORBOT_PLAY_URI = "https://play.google.com/store/apps/details?id=" + ORBOT_PACKAGE_NAME; /** * A request to Orbot to transparently start Tor services */ public final static String ACTION_START = "org.torproject.android.intent.action.START"; /** * {@link Intent} send by Orbot with {@code ON/OFF/STARTING/STOPPING} status */ public final static String ACTION_STATUS = "org.torproject.android.intent.action.STATUS"; /** * {@code String} that contains a status constant: {@link #STATUS_ON}, * {@link #STATUS_OFF}, {@link #STATUS_STARTING}, or * {@link #STATUS_STOPPING} */ public final static String EXTRA_STATUS = "org.torproject.android.intent.extra.STATUS"; /** * A {@link String} {@code packageName} for Orbot to direct its status reply * to, used in {@link #ACTION_START} {@link Intent}s sent to Orbot */ public final static String EXTRA_PACKAGE_NAME = "org.torproject.android.intent.extra.PACKAGE_NAME"; /** * All tor-related services and daemons are stopped */ public final static String STATUS_OFF = "OFF"; /** * All tor-related services and daemons have completed starting */ public final static String STATUS_ON = "ON"; public final static String STATUS_STARTING = "STARTING"; public final static String STATUS_STOPPING = "STOPPING"; /** * The user has disabled 
the ability for background starts triggered by * apps. Fallback to the old Intent that brings up Orbot. */ public final static String STATUS_STARTS_DISABLED = "STARTS_DISABLED"; public final static String ACTION_START_TOR = "org.torproject.android.START_TOR"; public final static String ACTION_REQUEST_HS = "org.torproject.android.REQUEST_HS_PORT"; public final static int START_TOR_RESULT = 0x048079234; public final static int HS_REQUEST_CODE = 9999; private OrbotHelper() { // only static utility methods, do not instantiate } public static boolean isOrbotRunning(Context context) { int procId = TorServiceUtils.findProcessId(context); return (procId != -1); } public static boolean isOrbotInstalled(Context context) { return isAppInstalled(context, ORBOT_PACKAGE_NAME); } private static boolean isAppInstalled(Context context, String uri) { try { PackageManager pm = context.getPackageManager(); pm.getPackageInfo(uri, PackageManager.GET_ACTIVITIES); return true; } catch (PackageManager.NameNotFoundException e) { return false; } } public static void requestHiddenServiceOnPort(Activity activity, int port) { Intent intent = new Intent(ACTION_REQUEST_HS); intent.setPackage(ORBOT_PACKAGE_NAME); intent.putExtra("hs_port", port); activity.startActivityForResult(intent, HS_REQUEST_CODE); } /** * First, checks whether Orbot is installed. If Orbot is installed, then a * broadcast {@link Intent} is sent to request Orbot to start transparently * in the background. When Orbot receives this {@code Intent}, it will * immediately reply to this all with its status via an * {@link #ACTION_STATUS} {@code Intent} that is broadcast to the * {@code packageName} of the provided {@link Context} (i.e. * {@link Context#getPackageName()}. 
* * @param context the app {@link Context} will receive the reply * @return whether the start request was sent to Orbot */ public static boolean requestStartTor(Context context) { if (OrbotHelper.isOrbotInstalled(context)) { Log.i("OrbotHelper", "requestStartTor " + context.getPackageName()); Intent intent = getOrbotStartIntent(); intent.putExtra(EXTRA_PACKAGE_NAME, context.getPackageName()); context.sendBroadcast(intent); return true; } return false; } public static Intent getOrbotStartIntent() { Intent intent = new Intent(ACTION_START); intent.setPackage(ORBOT_PACKAGE_NAME); return intent; } /** * First, checks whether Orbot is installed, then checks whether Orbot is * running. If Orbot is installed and not running, then an {@link Intent} is * sent to request Orbot to start, which will show the main Orbot screen. * The result will be returned in * {@link Activity#onActivityResult(int requestCode, int resultCode, Intent data)} * with a {@code requestCode} of {@link START_TOR_RESULT} * * @param activity the {@link Activity} that gets the * {@code START_TOR_RESULT} result * @return whether the start request was sent to Orbot */ public static boolean requestShowOrbotStart(Activity activity) { if (OrbotHelper.isOrbotInstalled(activity)) { if (!OrbotHelper.isOrbotRunning(activity)) { Intent intent = getShowOrbotStartIntent(); activity.startActivityForResult(intent, START_TOR_RESULT); return true; } } return false; } public static Intent getShowOrbotStartIntent() { Intent intent = new Intent(ACTION_START_TOR); intent.setPackage(ORBOT_PACKAGE_NAME); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); return intent; } public static Intent getOrbotInstallIntent(Context context) { final Intent intent = new Intent(Intent.ACTION_VIEW); intent.setData(Uri.parse(ORBOT_MARKET_URI)); PackageManager pm = context.getPackageManager(); List<ResolveInfo> resInfos = pm.queryIntentActivities(intent, 0); String foundPackageName = null; for (ResolveInfo r : resInfos) { Log.i("OrbotHelper", 
"market: " + r.activityInfo.packageName); if (TextUtils.equals(r.activityInfo.packageName, FDROID_PACKAGE_NAME) || TextUtils.equals(r.activityInfo.packageName, PLAY_PACKAGE_NAME)) { foundPackageName = r.activityInfo.packageName; break; } } if (foundPackageName == null) { intent.setData(Uri.parse(ORBOT_FDROID_URI)); } else { intent.setPackage(foundPackageName); } return intent; } @Override public boolean isInstalled(Context context) { return isOrbotInstalled(context); } @Override public void requestStatus(Context context) { isOrbotRunning(context); } @Override public boolean requestStart(Context context) { return requestStartTor(context); } @Override public Intent getInstallIntent(Context context) { return getOrbotInstallIntent(context); } @Override public Intent getStartIntent(Context context) { return getOrbotStartIntent(); } @Override public String getName() { return "Orbot"; } }
apache-2.0
meetdestiny/geronimo-trader
modules/transaction/src/java/org/apache/geronimo/transaction/DefaultInstanceContext.java
2413
/**
 *
 * Copyright 2003-2004 The Apache Software Foundation
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package org.apache.geronimo.transaction;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;

/**
 * Simple implementation of ComponentContext satisfying invariant.
 *
 * Most lifecycle callbacks ({@link #associate()}, {@link #flush()}, etc.)
 * are intentional no-ops; the context only tracks call nesting depth,
 * liveness, and the resource sets handed in at construction time.
 *
 * @version $Rev$ $Date$
 */
public class DefaultInstanceContext implements InstanceContext {

    // Per-instance connection manager registry, exposed mutably via its getter.
    private final Map connectionManagerMap = new HashMap();

    private final Set unshareableResources;
    private final Set applicationManagedSecurityResources;

    // How many enter() calls are currently outstanding.
    private int nestingDepth;

    // Flipped permanently to true by die().
    private boolean destroyed;

    public DefaultInstanceContext(Set unshareableResources, Set applicationManagedSecurityResources) {
        this.unshareableResources = unshareableResources;
        this.applicationManagedSecurityResources = applicationManagedSecurityResources;
    }

    /** Always {@code null}: this default context carries no identity. */
    public Object getId() {
        return null;
    }

    /** Always {@code null}: this default context belongs to no container. */
    public Object getContainerId() {
        return null;
    }

    /** No-op. */
    public void associate() throws Exception {
    }

    /** No-op. */
    public void flush() throws Exception {
    }

    /** No-op. */
    public void beforeCommit() throws Exception {
    }

    /** No-op. */
    public void afterCommit(boolean status) throws Exception {
    }

    /** No-op. */
    public void unassociate() throws Throwable {
    }

    public Map getConnectionManagerMap() {
        return connectionManagerMap;
    }

    public Set getUnshareableResources() {
        return unshareableResources;
    }

    public Set getApplicationManagedSecurityResources() {
        return applicationManagedSecurityResources;
    }

    /** Whether at least one enter() has not yet been balanced by an exit(). */
    public boolean isInCall() {
        return nestingDepth > 0;
    }

    /** Records entry into a call; must be balanced by {@link #exit()}. */
    public void enter() {
        nestingDepth++;
    }

    /** Records exit from a call; asserts that a matching enter() happened. */
    public void exit() {
        assert isInCall();
        nestingDepth--;
    }

    public boolean isDead() {
        return destroyed;
    }

    /** Marks this context permanently dead. */
    public void die() {
        destroyed = true;
    }
}
apache-2.0
limespace/VaadinGraphvizComponent
vizcomponent-root/vizcomponent/src/main/java/com/vaadin/pontus/vizcomponent/client/VizComponentServerRpc.java
417
package com.vaadin.pontus.vizcomponent.client;

import com.vaadin.shared.MouseEventDetails;
import com.vaadin.shared.communication.ServerRpc;

// ServerRpc is used to pass events from client to server
/**
 * Server-side RPC interface for the Viz component: the client invokes these
 * methods to report user clicks on graph elements to the server.
 */
public interface VizComponentServerRpc extends ServerRpc {

    /**
     * Reports that a graph node was clicked.
     *
     * @param nodeId       identifier of the clicked node
     * @param mouseDetails mouse state (button, modifiers, coordinates) at click time
     */
    public void nodeClicked(String nodeId, MouseEventDetails mouseDetails);

    /**
     * Reports that a graph edge was clicked.
     *
     * @param edgeId       identifier of the clicked edge
     * @param mouseDetails mouse state (button, modifiers, coordinates) at click time
     */
    public void edgeClicked(String edgeId, MouseEventDetails mouseDetails);
}
apache-2.0
jaehong-kim/pinpoint
web/src/main/java/com/navercorp/pinpoint/web/vo/callstacks/RecordFactory.java
11203
/*
 * Copyright 2019 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.web.vo.callstacks;

import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

import com.navercorp.pinpoint.common.server.bo.AnnotationBo;
import com.navercorp.pinpoint.common.server.bo.ApiMetaDataBo;
import com.navercorp.pinpoint.common.server.bo.MethodTypeEnum;
import com.navercorp.pinpoint.loader.service.AnnotationKeyRegistryService;
import com.navercorp.pinpoint.loader.service.ServiceTypeRegistryService;
import com.navercorp.pinpoint.common.trace.AnnotationKey;
import com.navercorp.pinpoint.common.trace.AnnotationKeyMatcher;
import com.navercorp.pinpoint.common.trace.ServiceType;
import com.navercorp.pinpoint.common.server.util.AnnotationUtils;
import com.navercorp.pinpoint.common.server.trace.ApiDescription;
import com.navercorp.pinpoint.common.server.trace.ApiDescriptionParser;
import com.navercorp.pinpoint.web.calltree.span.Align;
import com.navercorp.pinpoint.web.calltree.span.CallTreeNode;
import com.navercorp.pinpoint.web.service.AnnotationKeyMatcherService;
import com.navercorp.pinpoint.web.service.ProxyRequestTypeRegistryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Factory that turns call-tree nodes ({@link CallTreeNode}/{@link Align}) into the
 * {@link Record} objects rendered in the call-stack view, assigning each record a
 * sequential id and resolving its display title/arguments from API metadata and
 * annotations.
 * <p>
 * NOTE(review): id assignment uses a plain int counter, so one instance appears
 * intended for a single transaction render pass — confirm it is not shared
 * across threads.
 *
 * @author minwoo.jung
 */
public class RecordFactory {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // spans with id = 0 are regarded as root - start at 1
    private int idGen = 1;

    private final AnnotationKeyMatcherService annotationKeyMatcherService;
    private final ServiceTypeRegistryService registry;
    private final AnnotationKeyRegistryService annotationKeyRegistryService;
    private final ApiDescriptionParser apiDescriptionParser = new ApiDescriptionParser();
    private final AnnotationRecordFormatter annotationRecordFormatter;
    private final ProxyRequestTypeRegistryService proxyRequestTypeRegistryService;

    public RecordFactory(final AnnotationKeyMatcherService annotationKeyMatcherService,
                         final ServiceTypeRegistryService registry,
                         final AnnotationKeyRegistryService annotationKeyRegistryService,
                         final ProxyRequestTypeRegistryService proxyRequestTypeRegistryService) {
        this.annotationKeyMatcherService = annotationKeyMatcherService;
        this.registry = registry;
        this.annotationKeyRegistryService = annotationKeyRegistryService;
        this.proxyRequestTypeRegistryService = proxyRequestTypeRegistryService;
        this.annotationRecordFormatter = new AnnotationRecordFormatter(proxyRequestTypeRegistryService);
    }

    /**
     * Builds the main record for a call-tree node: assigns the node's align a fresh id,
     * resolves its API title/class from API metadata and its argument from the rpc field
     * or display annotations, and packages everything into a {@link DefaultRecord}.
     */
    public Record get(final CallTreeNode node) {
        final Align align = node.getAlign();
        align.setId(getNextId());

        final int parentId = getParentId(node);
        Api api = getApi(align);
        final String argument = getArgument(align);

        final Record record = new DefaultRecord(align.getDepth(),
                align.getId(),
                parentId,
                true,
                api.getTitle(),
                argument,
                align.getStartTime(),
                align.getElapsed(),
                align.getGap(),
                align.getAgentId(),
                align.getApplicationId(),
                registry.findServiceType(align.getServiceType()),
                align.getDestinationId(),
                align.hasChild(),
                false,
                align.getTransactionId(),
                align.getSpanId(),
                align.getExecutionMilliseconds(),
                api.getMethodTypeEnum(),
                true);
        record.setSimpleClassName(api.getClassName());
        record.setFullApiDescription(api.getDescription());

        return record;
    }

    // Prefers the align's rpc string as the display argument; falls back to annotations.
    private String getArgument(final Align align) {
        final String rpc = align.getRpc();
        if (rpc != null) {
            return rpc;
        }

        return getDisplayArgument(align);
    }

    // Formats the first annotation that the service type's matcher selects; "" if none.
    private String getDisplayArgument(Align align) {
        final AnnotationBo displayArgument = getDisplayArgument0(align.getServiceType(), align.getAnnotationBoList());
        if (displayArgument == null) {
            return "";
        }
        final AnnotationKey key = findAnnotationKey(displayArgument.getKey());
        return this.annotationRecordFormatter.formatArguments(key, displayArgument, align);
    }

    // Returns the first annotation whose key the service-type-specific matcher accepts,
    // or null when there is no matcher or no matching annotation.
    private AnnotationBo getDisplayArgument0(final short serviceType, final List<AnnotationBo> annotationBoList) {
        if (annotationBoList == null) {
            return null;
        }

        final AnnotationKeyMatcher matcher = annotationKeyMatcherService.findAnnotationKeyMatcher(serviceType);
        if (matcher == null) {
            return null;
        }

        for (AnnotationBo annotation : annotationBoList) {
            int key = annotation.getKey();
            if (matcher.matches(key)) {
                return annotation;
            }
        }
        return null;
    }

    /**
     * Builds a placeholder record for a node excluded by filtering: keeps depth/timing
     * but masks agent ("UNKNOWN"), service type and metadata, using the caller-supplied
     * title instead of resolving API metadata.
     */
    public Record getFilteredRecord(final CallTreeNode node, String apiTitle) {
        final Align align = node.getAlign();
        align.setId(getNextId());

        final int parentId = getParentId(node);
        // Api api = getApi(align);

        final Record record = new DefaultRecord(align.getDepth(),
                align.getId(),
                parentId,
                true,
                apiTitle,
                "",
                align.getStartTime(),
                align.getElapsed(),
                align.getGap(),
                "UNKNOWN",
                align.getApplicationId(),
                ServiceType.UNKNOWN,
                "",
                false,
                false,
                align.getTransactionId(),
                align.getSpanId(),
                align.getExecutionMilliseconds(),
                MethodTypeEnum.DEFAULT,
                false);

        return record;
    }

    /**
     * Builds an exception record for the align, or returns {@code null} when the align
     * carries no exception.
     */
    public Record getException(final int depth, final int parentId, final Align align) {
        if (!align.hasException()) {
            return null;
        }
        return new ExceptionRecord(depth, getNextId(), parentId, align);
    }

    /**
     * Builds one {@link AnnotationRecord} per annotation whose key is flagged for display
     * in the record set, formatting title and arguments via the annotation formatter.
     */
    public List<Record> getAnnotations(final int depth, final int parentId, Align align) {
        List<Record> list = new ArrayList<>();
        for (AnnotationBo annotation : align.getAnnotationBoList()) {
            final AnnotationKey key = findAnnotationKey(annotation.getKey());
            if (key.isViewInRecordSet()) {
                final String title = this.annotationRecordFormatter.formatTitle(key, annotation, align);
                final String arguments = this.annotationRecordFormatter.formatArguments(key, annotation, align);
                final Record record = new AnnotationRecord(depth, getNextId(), parentId, title, arguments, annotation.isAuthorized());
                list.add(record);
            }
        }
        return list;
    }

    /** Builds a parameter record (method name + argument string) at the given position. */
    public Record getParameter(final int depth, final int parentId, final String method, final String argument) {
        return new ParameterRecord(depth, getNextId(), parentId, method, argument);
    }

    // Id of the parent node's align; 0 for the root span. A parentless non-span node is
    // a structural error.
    int getParentId(final CallTreeNode node) {
        final CallTreeNode parent = node.getParent();
        if (parent == null) {
            if (!node.getAlign().isSpan()) {
                throw new IllegalStateException("parent is null. node=" + node);
            }
            return 0;
        }
        return parent.getAlign().getId();
    }

    // Resolves title/class/description for the align from its API_METADATA annotation.
    // DEFAULT-typed methods get their description parsed into simple method/class names
    // (parse failures fall back to the raw api info). Without metadata, the title becomes
    // the name of the matching api-metadata-error key.
    private Api getApi(final Align align) {
        final AnnotationBo annotation = AnnotationUtils.findAnnotationBo(align.getAnnotationBoList(), AnnotationKey.API_METADATA);
        if (annotation != null) {
            final Api api = new Api();
            final ApiMetaDataBo apiMetaData = (ApiMetaDataBo) annotation.getValue();
            String apiInfo = getApiInfo(apiMetaData);
            api.setTitle(apiInfo);
            api.setDescription(apiInfo);
            if (apiMetaData.getMethodTypeEnum() == MethodTypeEnum.DEFAULT) {
                try {
                    ApiDescription apiDescription = apiDescriptionParser.parse(api.description);
                    api.setTitle(apiDescription.getSimpleMethodDescription());
                    api.setClassName(apiDescription.getSimpleClassName());
                } catch (Exception ignored) {
                    // parse failure: keep the raw api info as title/description
                }
            }
            api.setMethodTypeEnum(apiMetaData.getMethodTypeEnum());
            return api;
        } else {
            final Api api = new Api();
            AnnotationKey apiMetaDataError = getApiMetaDataError(align.getAnnotationBoList());
            api.setTitle(apiMetaDataError.getName());
            return api;
        }
    }

    // Appends ":<lineNumber>" to the api info when a line number is recorded (-1 = none).
    private String getApiInfo(ApiMetaDataBo apiMetaDataBo) {
        if (apiMetaDataBo.getLineNumber() != -1) {
            return apiMetaDataBo.getApiInfo() + ":" + apiMetaDataBo.getLineNumber();
        } else {
            return apiMetaDataBo.getApiInfo();
        }
    }

    /**
     * Finds the first annotation whose key maps to a registered API error code.
     */
    public AnnotationKey getApiMetaDataError(List<AnnotationBo> annotationBoList) {
        for (AnnotationBo bo : annotationBoList) {
            AnnotationKey apiErrorCode = annotationKeyRegistryService.findApiErrorCode(bo.getKey());
            if (apiErrorCode != null) {
                return apiErrorCode;
            }
        }
        // could not find a more specific error - returns generalized error
        return AnnotationKey.ERROR_API_METADATA_ERROR;
    }

    private AnnotationKey findAnnotationKey(int key) {
        return annotationKeyRegistryService.findAnnotationKey(key);
    }

    // Hands out sequential record ids, starting at 1 (0 is reserved for root).
    private int getNextId() {
        return idGen++;
    }

    // Mutable holder for the resolved display attributes of one API call.
    private static class Api {
        private String title = "";
        private String className = "";
        private String description = "";
        private MethodTypeEnum methodTypeEnum = MethodTypeEnum.DEFAULT;

        public Api() {
        }

        public String getTitle() {
            return title;
        }

        public void setTitle(String title) {
            this.title = title;
        }

        public String getClassName() {
            return className;
        }

        public void setClassName(String className) {
            this.className = className;
        }

        public String getDescription() {
            return description;
        }

        public void setDescription(String description) {
            this.description = description;
        }

        public MethodTypeEnum getMethodTypeEnum() {
            return methodTypeEnum;
        }

        public void setMethodTypeEnum(MethodTypeEnum methodTypeEnum) {
            this.methodTypeEnum = Objects.requireNonNull(methodTypeEnum, "methodTypeEnum");
        }
    }
}
apache-2.0
CloudSlang/cs-actions
cs-maps/src/main/java/io/cloudslang/content/maps/constants/DefaultInputValues.java
1715
/*
 * (c) Copyright 2020 EntIT Software LLC, a Micro Focus company, L.P.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License v2.0 which accompany this distribution.
 *
 * The Apache License is available at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.cloudslang.content.maps.constants;

import org.apache.commons.lang3.StringUtils;

/**
 * Default values for the map-action inputs. Most inputs default to the empty
 * string (meaning "not provided"); the boolean-valued inputs default to
 * {@code "false"} and sort order to ascending.
 */
public final class DefaultInputValues {

    public static final String MAP = StringUtils.EMPTY;
    public static final String VALUE = StringUtils.EMPTY;
    public static final String MAP_START = StringUtils.EMPTY;
    public static final String MAP_END = StringUtils.EMPTY;
    public static final String ELEMENT_WRAPPER = StringUtils.EMPTY;
    public static final String MATCH_TYPE = StringUtils.EMPTY;
    // Boolean-like flags are carried as strings; "false" unless the caller overrides.
    public static final String STRIP_WHITESPACES = String.valueOf(false);
    public static final String ELEMENTS = StringUtils.EMPTY;
    public static final String SORT_BY = StringUtils.EMPTY;
    // Ascending sort unless the caller requests otherwise.
    public static final String SORT_ORDER = "asc";
    public static final String METHOD = StringUtils.EMPTY;
    public static final String METHOD_VALUE = StringUtils.EMPTY;
    public static final String HANDLE_EMPTY_VALUE = String.valueOf(false);
    public static final String IGNORE_CASE = String.valueOf(false);

    // Utility class: not instantiable.
    private DefaultInputValues() {

    }
}
apache-2.0
bf8086/alluxio
tests/src/test/java/alluxio/client/rest/S3ClientRestApiTest.java
36860
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.client.rest; import static org.junit.Assert.assertEquals; import alluxio.AlluxioURI; import alluxio.Constants; import alluxio.client.file.FileInStream; import alluxio.client.file.FileSystem; import alluxio.client.file.FileSystemContext; import alluxio.client.file.URIStatus; import alluxio.conf.PropertyKey; import alluxio.conf.ServerConfiguration; import alluxio.exception.FileDoesNotExistException; import alluxio.grpc.SetAttributePOptions; import alluxio.master.file.FileSystemMaster; import alluxio.master.file.contexts.CreateDirectoryContext; import alluxio.master.file.contexts.CreateFileContext; import alluxio.master.file.contexts.GetStatusContext; import alluxio.master.file.contexts.ListStatusContext; import alluxio.proxy.s3.CompleteMultipartUploadResult; import alluxio.proxy.s3.InitiateMultipartUploadResult; import alluxio.proxy.s3.ListAllMyBucketsResult; import alluxio.proxy.s3.ListBucketOptions; import alluxio.proxy.s3.ListBucketResult; import alluxio.proxy.s3.ListPartsResult; import alluxio.proxy.s3.S3Constants; import alluxio.proxy.s3.S3RestUtils; import alluxio.security.User; import alluxio.security.authentication.AuthType; import alluxio.security.authorization.Mode; import alluxio.security.authorization.ModeParser; import alluxio.testutils.LocalAlluxioClusterResource; import alluxio.util.CommonUtils; import alluxio.wire.FileInfo; import com.fasterxml.jackson.dataformat.xml.XmlMapper; import com.google.common.collect.Lists; import 
com.google.common.io.BaseEncoding; import org.apache.commons.codec.binary.Hex; import org.apache.commons.io.IOUtils; import org.junit.Assert; import org.junit.Before; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.rules.TestRule; import java.io.ByteArrayInputStream; import java.net.HttpURLConnection; import java.security.MessageDigest; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.security.auth.Subject; import javax.ws.rs.HttpMethod; import javax.ws.rs.core.Response; /** * Test cases for {@link alluxio.proxy.s3.S3RestServiceHandler}. */ public final class S3ClientRestApiTest extends RestApiTest { private static final int DATA_SIZE = 16 * Constants.KB; // cannot be too large, since all block streams are open until file is closed, and may run out of // block worker clients. private static final int LARGE_DATA_SIZE = 256 * Constants.KB; private static final GetStatusContext GET_STATUS_CONTEXT = GetStatusContext.defaults(); private static final Map<String, String> NO_PARAMS = new HashMap<>(); private static final XmlMapper XML_MAPPER = new XmlMapper(); private static final String S3_SERVICE_PREFIX = "s3"; private static final String BUCKET_SEPARATOR = ":"; private FileSystem mFileSystem; private FileSystemMaster mFileSystemMaster; // TODO(chaomin): Rest API integration tests are only run in NOSASL mode now. Need to // fix the test setup in SIMPLE mode. 
@ClassRule public static LocalAlluxioClusterResource sResource = new LocalAlluxioClusterResource.Builder() .setProperty(PropertyKey.SECURITY_AUTHORIZATION_PERMISSION_ENABLED, "false") .setProperty(PropertyKey.SECURITY_AUTHENTICATION_TYPE, AuthType.NOSASL.getAuthName()) .setProperty(PropertyKey.USER_FILE_BUFFER_BYTES, "1KB") .build(); @Rule public TestRule mResetRule = sResource.getResetResource(); @Rule public TemporaryFolder mFolder = new TemporaryFolder(); @Before public void before() throws Exception { mHostname = sResource.get().getHostname(); mPort = sResource.get().getProxyProcess().getWebLocalPort(); mFileSystemMaster = sResource.get().getLocalAlluxioMaster().getMasterProcess() .getMaster(FileSystemMaster.class); mFileSystem = sResource.get().getClient(); } @Test public void listAllMyBuckets() throws Exception { Mode mode = ModeParser.parse("777"); SetAttributePOptions options = SetAttributePOptions.newBuilder().setMode(mode.toProto()).setRecursive(true).build(); mFileSystem.setAttribute(new AlluxioURI("/"), options); Subject subject = new Subject(); subject.getPrincipals().add(new User("user0")); sResource.get().getClient(FileSystemContext.create(subject, ServerConfiguration.global())) .createDirectory(new AlluxioURI("/bucket0")); SetAttributePOptions setAttributeOptions = SetAttributePOptions.newBuilder().setOwner("user0").build(); mFileSystem.setAttribute(new AlluxioURI("/bucket0"), setAttributeOptions); subject = new Subject(); subject.getPrincipals().add(new User("user1")); sResource.get().getClient(FileSystemContext.create(subject, ServerConfiguration.global())) .createDirectory(new AlluxioURI("/bucket1")); setAttributeOptions = SetAttributePOptions.newBuilder().setOwner("user1").build(); mFileSystem.setAttribute(new AlluxioURI("/bucket1"), setAttributeOptions); ListAllMyBucketsResult expected = new ListAllMyBucketsResult(Collections.EMPTY_LIST); final TestCaseOptions requestOptions = TestCaseOptions.defaults() 
.setContentType(TestCaseOptions.XML_CONTENT_TYPE); new TestCase(mHostname, mPort, S3_SERVICE_PREFIX + "/", NO_PARAMS, HttpMethod.GET, expected, requestOptions).run(); expected = new ListAllMyBucketsResult(Lists.newArrayList("bucket0")); requestOptions.setAuthorization("AWS user0:"); new TestCase(mHostname, mPort, S3_SERVICE_PREFIX + "/", NO_PARAMS, HttpMethod.GET, expected, requestOptions).run(); expected = new ListAllMyBucketsResult(Lists.newArrayList("bucket1")); requestOptions.setAuthorization("AWS user1:"); new TestCase(mHostname, mPort, S3_SERVICE_PREFIX + "/", NO_PARAMS, HttpMethod.GET, expected, requestOptions).run(); } @Test public void listBucket() throws Exception { mFileSystem.createDirectory(new AlluxioURI("/bucket")); mFileSystem.createDirectory(new AlluxioURI("/bucket/folder0")); mFileSystem.createDirectory(new AlluxioURI("/bucket/folder1")); mFileSystem.createFile(new AlluxioURI("/bucket/file0")); mFileSystem.createFile(new AlluxioURI("/bucket/file1")); mFileSystem.createFile(new AlluxioURI("/bucket/folder0/file0")); mFileSystem.createFile(new AlluxioURI("/bucket/folder0/file1")); List<URIStatus> statuses = mFileSystem.listStatus(new AlluxioURI("/bucket")); ListBucketResult expected = new ListBucketResult("bucket", statuses, ListBucketOptions.defaults()); new TestCase(mHostname, mPort, S3_SERVICE_PREFIX + "/bucket", NO_PARAMS, HttpMethod.GET, expected, TestCaseOptions.defaults().setContentType(TestCaseOptions.XML_CONTENT_TYPE)).run(); assertEquals("file0", expected.getContents().get(0).getKey()); assertEquals("file1", expected.getContents().get(1).getKey()); assertEquals(Lists.newArrayList("folder0", "folder1"), expected.getCommonPrefixes().getCommonPrefixes()); statuses = mFileSystem.listStatus(new AlluxioURI("/bucket/folder0")); expected = new ListBucketResult("bucket", statuses, ListBucketOptions.defaults().setPrefix("/folder0")); final Map<String, String> parameters = new HashMap<>(); parameters.put("prefix", "/folder0"); new TestCase(mHostname, 
mPort, S3_SERVICE_PREFIX + "/bucket", parameters, HttpMethod.GET, expected, TestCaseOptions.defaults().setContentType(TestCaseOptions.XML_CONTENT_TYPE)).run(); assertEquals("folder0/file0", expected.getContents().get(0).getKey()); assertEquals("folder0/file1", expected.getContents().get(1).getKey()); assertEquals(0, expected.getCommonPrefixes().getCommonPrefixes().size()); } @Test public void listBucketPagination() throws Exception { AlluxioURI uri = new AlluxioURI("/bucket"); mFileSystem.createDirectory(uri); mFileSystem.createDirectory(new AlluxioURI("/bucket/folder0")); mFileSystem.createDirectory(new AlluxioURI("/bucket/folder1")); mFileSystem.createFile(new AlluxioURI("/bucket/file0")); mFileSystem.createFile(new AlluxioURI("/bucket/file1")); mFileSystem.createFile(new AlluxioURI("/bucket/folder0/file0")); mFileSystem.createFile(new AlluxioURI("/bucket/folder0/file1")); List<URIStatus> statuses = mFileSystem.listStatus(new AlluxioURI("/bucket")); ListBucketResult expected = new ListBucketResult("bucket", statuses, ListBucketOptions.defaults().setMaxKeys(1)); String nextMarker = expected.getNextMarker(); final Map<String, String> parameters = new HashMap<>(); parameters.put("max-keys", "1"); new TestCase(mHostname, mPort, S3_SERVICE_PREFIX + "/bucket", parameters, HttpMethod.GET, expected, TestCaseOptions.defaults().setContentType(TestCaseOptions.XML_CONTENT_TYPE)).run(); assertEquals("file0", expected.getContents().get(0).getKey()); assertEquals(0, expected.getCommonPrefixes().getCommonPrefixes().size()); parameters.put("marker", nextMarker); expected = new ListBucketResult("bucket", statuses, ListBucketOptions.defaults().setMaxKeys(1).setMarker(nextMarker)); nextMarker = expected.getNextMarker(); new TestCase(mHostname, mPort, S3_SERVICE_PREFIX + "/bucket", parameters, HttpMethod.GET, expected, TestCaseOptions.defaults().setContentType(TestCaseOptions.XML_CONTENT_TYPE)).run(); assertEquals("file1", expected.getContents().get(0).getKey()); assertEquals(0, 
expected.getCommonPrefixes().getCommonPrefixes().size()); parameters.put("marker", nextMarker); expected = new ListBucketResult("bucket", statuses, ListBucketOptions.defaults().setMaxKeys(1).setMarker(nextMarker)); nextMarker = expected.getNextMarker(); new TestCase(mHostname, mPort, S3_SERVICE_PREFIX + "/bucket", parameters, HttpMethod.GET, expected, TestCaseOptions.defaults().setContentType(TestCaseOptions.XML_CONTENT_TYPE)).run(); assertEquals(0, expected.getContents().size()); assertEquals(Lists.newArrayList("folder0"), expected.getCommonPrefixes().getCommonPrefixes()); } @Test public void putBucket() throws Exception { final String bucket = "bucket"; createBucketRestCall(bucket); // Verify the directory is created for the new bucket. AlluxioURI uri = new AlluxioURI(AlluxioURI.SEPARATOR + bucket); Assert.assertTrue(mFileSystemMaster .listStatus(uri, ListStatusContext.defaults()).isEmpty()); } @Test public void deleteBucket() throws Exception { final String bucket = "bucket-to-delete"; createBucketRestCall(bucket); // Verify the directory is created for the new bucket. AlluxioURI uri = new AlluxioURI(AlluxioURI.SEPARATOR + bucket); Assert.assertTrue(mFileSystemMaster .listStatus(uri, ListStatusContext.defaults()).isEmpty()); HttpURLConnection connection = deleteBucketRestCall(bucket); Assert.assertEquals(Response.Status.NO_CONTENT.getStatusCode(), connection.getResponseCode()); try { mFileSystemMaster.getFileInfo(uri, GET_STATUS_CONTEXT); } catch (FileDoesNotExistException e) { // expected return; } Assert.fail("bucket should have been removed"); } @Test public void deleteNonExistingBucket() throws Exception { final String bucketName = "non-existing-bucket"; try { // Delete a non-existing bucket should fail. 
deleteBucketRestCall(bucketName); } catch (AssertionError e) { // expected return; } Assert.fail("delete a non-existing bucket should fail"); } @Test public void deleteNonEmptyBucket() throws Exception { final String bucketName = "non-empty-bucket"; createBucketRestCall(bucketName); AlluxioURI uri = new AlluxioURI(AlluxioURI.SEPARATOR + bucketName); AlluxioURI fileUri = new AlluxioURI(uri.getPath() + "/file"); mFileSystemMaster.createFile(fileUri, CreateFileContext.defaults()); // Verify the directory is created for the new bucket, and file is created under it. Assert.assertFalse(mFileSystemMaster .listStatus(uri, ListStatusContext.defaults()).isEmpty()); try { // Delete a non-empty bucket should fail. deleteBucketRestCall(bucketName); } catch (AssertionError e) { // expected return; } Assert.fail("delete a non-empty bucket should fail"); } private void createObject(String objectKey, byte[] object, Long uploadId, Integer partNumber) throws Exception { Map<String, String> params = new HashMap<>(); if (uploadId != null) { params.put("uploadId", uploadId.toString()); } if (partNumber != null) { params.put("partNumber", partNumber.toString()); } createObjectRestCall(objectKey, object, null, params); } private void putObjectTest(String bucket, String objectKey, byte[] object, Long uploadId, Integer partNumber) throws Exception { final String fullObjectKey = bucket + AlluxioURI.SEPARATOR + objectKey; createObject(fullObjectKey, object, uploadId, partNumber); // Verify the object is created for the new bucket. 
AlluxioURI bucketURI = new AlluxioURI(AlluxioURI.SEPARATOR + bucket); AlluxioURI objectURI = new AlluxioURI(AlluxioURI.SEPARATOR + fullObjectKey); if (uploadId != null) { String tmpDir = S3RestUtils.getMultipartTemporaryDirForObject(bucketURI.getPath(), objectKey); bucketURI = new AlluxioURI(tmpDir); objectURI = new AlluxioURI(tmpDir + AlluxioURI.SEPARATOR + partNumber.toString()); } List<FileInfo> fileInfos = mFileSystemMaster.listStatus(bucketURI, ListStatusContext.defaults()); Assert.assertEquals(1, fileInfos.size()); Assert.assertEquals(objectURI.getPath(), fileInfos.get(0).getPath()); // Verify the object's content. FileInStream is = mFileSystem.openFile(objectURI); byte[] writtenObjectContent = IOUtils.toString(is).getBytes(); is.close(); Assert.assertArrayEquals(object, writtenObjectContent); } @Test public void putDirectoryObject() throws Exception { final String bucketName = "directory-bucket"; createBucketRestCall(bucketName); final String directoryName = "directory/"; createObject(bucketName + AlluxioURI.SEPARATOR + directoryName, new byte[]{}, null, null); final List<URIStatus> statuses = mFileSystem.listStatus( new AlluxioURI(AlluxioURI.SEPARATOR + bucketName)); assertEquals(1, statuses.size()); assertEquals(true, statuses.get(0).isFolder()); } @Test public void putSmallObject() throws Exception { final String bucketName = "small-object-bucket"; createBucketRestCall(bucketName); final String objectName = "object"; putObjectTest(bucketName, objectName, "Hello World!".getBytes(), null, null); } @Test public void putLargeObject() throws Exception { final String bucketName = "large-object-bucket"; createBucketRestCall(bucketName); final String objectName = "object"; final byte[] object = CommonUtils.randomAlphaNumString(LARGE_DATA_SIZE).getBytes(); putObjectTest(bucketName, objectName, object, null, null); } @Test public void putObjectUnderNonExistentBucket() throws Exception { final String bucket = "non-existent-bucket"; final String objectKey = bucket + 
AlluxioURI.SEPARATOR + "object.txt"; String message = "hello world"; try { createObjectRestCall(objectKey, message.getBytes(), null, NO_PARAMS); } catch (AssertionError e) { // expected return; } Assert.fail("create object under non-existent bucket should fail"); } @Test public void putObjectWithWrongMD5() throws Exception { final String bucket = "bucket"; createBucketRestCall(bucket); final String objectKey = bucket + AlluxioURI.SEPARATOR + "object.txt"; String objectContent = "hello world"; try { String wrongMD5 = BaseEncoding.base64().encode(objectContent.getBytes()); createObjectRestCall(objectKey, objectContent.getBytes(), wrongMD5, NO_PARAMS); } catch (AssertionError e) { // expected return; } Assert.fail("create object with wrong Content-MD5 should fail"); } @Test public void putObjectWithNoMD5() throws Exception { final String bucket = "bucket"; createBucketRestCall(bucket); final String objectKey = bucket + AlluxioURI.SEPARATOR + "object.txt"; String objectContent = "no md5 set"; String uri = S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + objectKey; TestCaseOptions options = TestCaseOptions.defaults(); options.setInputStream(new ByteArrayInputStream(objectContent.getBytes())); new TestCase(mHostname, mPort, uri, NO_PARAMS, HttpMethod.PUT, null, options).run(); } @Test public void getNonExistingBucket() throws Exception { final String bucketName = "non-existing-bucket"; try { // Delete a non-existing bucket should fail. 
new TestCase(mHostname, mPort, S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + bucketName, NO_PARAMS, HttpMethod.GET, null, TestCaseOptions.defaults().setContentType(TestCaseOptions.XML_CONTENT_TYPE)).run(); } catch (AssertionError e) { // expected return; } Assert.fail("get a non-existing bucket should fail"); } private void getObjectTest(byte[] expectedObject) throws Exception { final String bucket = "bucket"; createBucketRestCall(bucket); final String objectKey = bucket + AlluxioURI.SEPARATOR + "object.txt"; createObjectRestCall(objectKey, expectedObject, null, NO_PARAMS); Assert.assertArrayEquals(expectedObject, getObjectRestCall(objectKey).getBytes()); } @Test public void getSmallObject() throws Exception { getObjectTest("Hello World!".getBytes()); } @Test public void getLargeObject() throws Exception { getObjectTest(CommonUtils.randomAlphaNumString(LARGE_DATA_SIZE).getBytes()); } @Test public void getNonExistentObject() throws Exception { final String objectKey = "bucket/non-existent-object"; try { getObjectRestCall(objectKey); } catch (AssertionError e) { // expected return; } Assert.fail("get non-existent object should fail"); } @Test public void getObjectMetadata() throws Exception { final String bucket = "bucket"; createBucketRestCall(bucket); final String objectKey = bucket + AlluxioURI.SEPARATOR + "object.txt"; final byte[] objectContent = CommonUtils.randomAlphaNumString(10).getBytes(); createObjectRestCall(objectKey, objectContent, null, NO_PARAMS); HttpURLConnection connection = getObjectMetadataRestCall(objectKey); URIStatus status = mFileSystem.getStatus( new AlluxioURI(AlluxioURI.SEPARATOR + objectKey)); // remove the milliseconds from the last modification time because the accuracy of HTTP dates // is up to seconds. 
long lastModified = status.getLastModificationTimeMs() / 1000 * 1000; Assert.assertEquals(lastModified, connection.getLastModified()); Assert.assertEquals(String.valueOf(status.getLength()), connection.getHeaderField(S3Constants.S3_CONTENT_LENGTH_HEADER)); } @Test public void getNonExistentObjectMetadata() throws Exception { final String objectKey = "bucket/non-existent-object"; try { getObjectMetadataRestCall(objectKey); } catch (AssertionError e) { // expected return; } Assert.fail("get metadata of non-existent object should fail"); } @Test public void deleteObject() throws Exception { final String bucketName = "bucket-with-object-to-delete"; createBucketRestCall(bucketName); final String objectName = "file"; AlluxioURI bucketUri = new AlluxioURI(AlluxioURI.SEPARATOR + bucketName); AlluxioURI fileUri = new AlluxioURI( bucketUri.getPath() + AlluxioURI.SEPARATOR + objectName); mFileSystemMaster.createFile(fileUri, CreateFileContext.defaults()); // Verify the directory is created for the new bucket, and file is created under it. Assert.assertFalse(mFileSystemMaster .listStatus(bucketUri, ListStatusContext.defaults()).isEmpty()); deleteObjectRestCall(bucketName + AlluxioURI.SEPARATOR + objectName); // Verify the object is deleted. Assert.assertTrue(mFileSystemMaster .listStatus(bucketUri, ListStatusContext.defaults()).isEmpty()); } @Test public void deleteObjectAsAlluxioEmptyDir() throws Exception { final String bucketName = "bucket-with-empty-dir-to-delete"; createBucketRestCall(bucketName); String objectName = "empty-dir/"; AlluxioURI bucketUri = new AlluxioURI(AlluxioURI.SEPARATOR + bucketName); AlluxioURI dirUri = new AlluxioURI( bucketUri.getPath() + AlluxioURI.SEPARATOR + objectName); mFileSystemMaster.createDirectory(dirUri, CreateDirectoryContext.defaults()); // Verify the directory is created for the new bucket, and empty-dir is created under it. 
Assert.assertFalse(mFileSystemMaster .listStatus(bucketUri, ListStatusContext.defaults()).isEmpty()); deleteObjectRestCall(bucketName + AlluxioURI.SEPARATOR + objectName); // Verify the empty-dir as a valid object is deleted. Assert.assertTrue(mFileSystemMaster .listStatus(bucketUri, ListStatusContext.defaults()).isEmpty()); } @Test public void deleteObjectAsAlluxioNonEmptyDir() throws Exception { final String bucketName = "bucket-with-non-empty-dir-to-delete"; createBucketRestCall(bucketName); String objectName = "non-empty-dir/"; AlluxioURI bucketUri = new AlluxioURI(AlluxioURI.SEPARATOR + bucketName); AlluxioURI dirUri = new AlluxioURI( bucketUri.getPath() + AlluxioURI.SEPARATOR + objectName); mFileSystemMaster.createDirectory(dirUri, CreateDirectoryContext.defaults()); mFileSystemMaster.createFile( new AlluxioURI(dirUri.getPath() + "/file"), CreateFileContext.defaults()); Assert.assertFalse(mFileSystemMaster .listStatus(dirUri, ListStatusContext.defaults()).isEmpty()); try { deleteObjectRestCall(bucketName + AlluxioURI.SEPARATOR + objectName); } catch (AssertionError e) { // expected return; } Assert.fail("delete non-empty directory as an object should fail"); } @Test public void deleteNonExistingObject() throws Exception { final String bucketName = "bucket-with-nothing"; createBucketRestCall(bucketName); String objectName = "non-existing-object"; try { deleteObjectRestCall(bucketName + AlluxioURI.SEPARATOR + objectName); } catch (AssertionError e) { // expected return; } Assert.fail("delete non-existing object should fail"); } @Test public void initiateMultipartUpload() throws Exception { final String bucketName = "bucket"; createBucketRestCall(bucketName); final String objectName = "object"; String objectKey = bucketName + AlluxioURI.SEPARATOR + objectName; String result = initiateMultipartUploadRestCall(objectKey); String multipartTempDir = S3RestUtils.getMultipartTemporaryDirForObject( AlluxioURI.SEPARATOR + bucketName, objectName); URIStatus status = 
mFileSystem.getStatus(new AlluxioURI(multipartTempDir)); long tempDirId = status.getFileId(); InitiateMultipartUploadResult expected = new InitiateMultipartUploadResult(bucketName, objectName, Long.toString(tempDirId)); String expectedResult = XML_MAPPER.writeValueAsString(expected); Assert.assertEquals(expectedResult, result); } @Test public void uploadPart() throws Exception { final String bucketName = "bucket"; createBucketRestCall(bucketName); final String objectName = "object"; String objectKey = bucketName + AlluxioURI.SEPARATOR + objectName; String result = initiateMultipartUploadRestCall(objectKey); InitiateMultipartUploadResult multipartUploadResult = XML_MAPPER.readValue(result, InitiateMultipartUploadResult.class); final long uploadId = Long.parseLong(multipartUploadResult.getUploadId()); final byte[] object = CommonUtils.randomAlphaNumString(DATA_SIZE).getBytes(); putObjectTest(bucketName, objectName, object, uploadId, 1); } @Test public void uploadPartWithNonExistingUploadId() throws Exception { final String bucketName = "bucket"; createBucketRestCall(bucketName); final String objectName = "object"; String objectKey = bucketName + AlluxioURI.SEPARATOR + objectName; String result = initiateMultipartUploadRestCall(objectKey); InitiateMultipartUploadResult multipartUploadResult = XML_MAPPER.readValue(result, InitiateMultipartUploadResult.class); final long uploadId = Long.parseLong(multipartUploadResult.getUploadId()); final byte[] object = CommonUtils.randomAlphaNumString(DATA_SIZE).getBytes(); try { putObjectTest(bucketName, objectName, object, uploadId + 1, 1); } catch (AssertionError e) { // Expected because of the wrong upload ID. 
return; } Assert.fail("Upload part of an object with wrong upload ID should fail"); } @Test public void uploadPartWithoutInitiation() throws Exception { final String bucketName = "bucket"; createBucketRestCall(bucketName); try { final String objectName = "object"; final byte[] object = CommonUtils.randomAlphaNumString(DATA_SIZE).getBytes(); putObjectTest(bucketName, objectName, object, 1L, 1); } catch (AssertionError e) { // Expected because there is no such upload ID. return; } Assert.fail("Upload part of an object without multipart upload initialization should fail"); } @Test public void listParts() throws Exception { final String bucket = "bucket"; final String bucketPath = AlluxioURI.SEPARATOR + bucket; createBucketRestCall(bucket); final String object = "object"; final String objectKey = bucket + AlluxioURI.SEPARATOR + object; // Initiate multipart upload to get upload ID. String result = initiateMultipartUploadRestCall(objectKey); InitiateMultipartUploadResult multipartUploadResult = XML_MAPPER.readValue(result, InitiateMultipartUploadResult.class); final long uploadId = Long.parseLong(multipartUploadResult.getUploadId()); // No parts are uploaded yet. result = listPartsRestCall(objectKey, uploadId); ListPartsResult listPartsResult = XML_MAPPER.readValue(result, ListPartsResult.class); Assert.assertEquals(bucketPath, listPartsResult.getBucket()); Assert.assertEquals(object, listPartsResult.getKey()); Assert.assertEquals(Long.toString(uploadId), listPartsResult.getUploadId()); Assert.assertEquals(0, listPartsResult.getParts().size()); // Upload 2 parts. 
String object1 = CommonUtils.randomAlphaNumString(DATA_SIZE); String object2 = CommonUtils.randomAlphaNumString(DATA_SIZE); createObject(objectKey, object1.getBytes(), uploadId, 1); createObject(objectKey, object2.getBytes(), uploadId, 2); result = listPartsRestCall(objectKey, uploadId); listPartsResult = XML_MAPPER.readValue(result, ListPartsResult.class); Assert.assertEquals(bucketPath, listPartsResult.getBucket()); Assert.assertEquals(object, listPartsResult.getKey()); Assert.assertEquals(Long.toString(uploadId), listPartsResult.getUploadId()); String tmpDir = S3RestUtils.getMultipartTemporaryDirForObject(bucketPath, object); List<ListPartsResult.Part> parts = listPartsResult.getParts(); Assert.assertEquals(2, parts.size()); for (int partNumber = 1; partNumber <= parts.size(); partNumber++) { ListPartsResult.Part part = parts.get(partNumber - 1); Assert.assertEquals(partNumber, part.getPartNumber()); URIStatus status = mFileSystem.getStatus( new AlluxioURI(tmpDir + AlluxioURI.SEPARATOR + Integer.toString(partNumber))); Assert.assertEquals(S3RestUtils.toS3Date(status.getLastModificationTimeMs()), part.getLastModified()); Assert.assertEquals(status.getLength(), part.getSize()); } } @Test public void abortMultipartUpload() throws Exception { final String bucketName = "bucket"; createBucketRestCall(bucketName); final String objectName = "object"; String objectKey = bucketName + AlluxioURI.SEPARATOR + objectName; String result = initiateMultipartUploadRestCall(objectKey); InitiateMultipartUploadResult multipartUploadResult = XML_MAPPER.readValue(result, InitiateMultipartUploadResult.class); AlluxioURI tmpDir = new AlluxioURI(S3RestUtils.getMultipartTemporaryDirForObject( AlluxioURI.SEPARATOR + bucketName, objectName)); Assert.assertTrue(mFileSystem.exists(tmpDir)); Assert.assertTrue(mFileSystem.getStatus(tmpDir).isFolder()); final long uploadId = Long.parseLong(multipartUploadResult.getUploadId()); HttpURLConnection connection = 
abortMultipartUploadRestCall(objectKey, uploadId); Assert.assertEquals(Response.Status.NO_CONTENT.getStatusCode(), connection.getResponseCode()); Assert.assertFalse(mFileSystem.exists(tmpDir)); } @Test public void abortMultipartUploadWithNonExistingUploadId() throws Exception { final String bucketName = "bucket"; createBucketRestCall(bucketName); final String objectName = "object"; String objectKey = bucketName + AlluxioURI.SEPARATOR + objectName; String result = initiateMultipartUploadRestCall(objectKey); InitiateMultipartUploadResult multipartUploadResult = XML_MAPPER.readValue(result, InitiateMultipartUploadResult.class); AlluxioURI tmpDir = new AlluxioURI(S3RestUtils.getMultipartTemporaryDirForObject( AlluxioURI.SEPARATOR + bucketName, objectName)); Assert.assertTrue(mFileSystem.exists(tmpDir)); Assert.assertTrue(mFileSystem.getStatus(tmpDir).isFolder()); final long uploadId = Long.parseLong(multipartUploadResult.getUploadId()); try { abortMultipartUploadRestCall(objectKey, uploadId + 1); } catch (AssertionError e) { // Expected since the upload ID does not exist, the temporary directory should still exist. Assert.assertTrue(mFileSystem.exists(tmpDir)); return; } Assert.fail("Abort multipart upload with non-existing upload ID should fail"); } @Test public void completeMultipartUpload() throws Exception { // Two temporary parts in the multipart upload, each part contains a random string, // after completion, the object should contain the combination of the two strings. final String bucketName = "bucket"; createBucketRestCall(bucketName); final String objectName = "object"; String objectKey = bucketName + AlluxioURI.SEPARATOR + objectName; // Initiate the multipart upload. String result = initiateMultipartUploadRestCall(objectKey); InitiateMultipartUploadResult multipartUploadResult = XML_MAPPER.readValue(result, InitiateMultipartUploadResult.class); final long uploadId = Long.parseLong(multipartUploadResult.getUploadId()); // Upload parts. 
String object1 = CommonUtils.randomAlphaNumString(DATA_SIZE); String object2 = CommonUtils.randomAlphaNumString(DATA_SIZE); createObject(objectKey, object1.getBytes(), uploadId, 1); createObject(objectKey, object2.getBytes(), uploadId, 2); // Verify that the two parts are uploaded to the temporary directory. AlluxioURI tmpDir = new AlluxioURI(S3RestUtils.getMultipartTemporaryDirForObject( AlluxioURI.SEPARATOR + bucketName, objectName)); Assert.assertEquals(2, mFileSystem.listStatus(tmpDir).size()); // Complete the multipart upload. result = completeMultipartUploadRestCall(objectKey, uploadId); // Verify that the response is expected. String expectedCombinedObject = object1 + object2; MessageDigest md5 = MessageDigest.getInstance("MD5"); byte[] digest = md5.digest(expectedCombinedObject.getBytes()); String etag = Hex.encodeHexString(digest); String objectPath = AlluxioURI.SEPARATOR + objectKey; CompleteMultipartUploadResult completeMultipartUploadResult = new CompleteMultipartUploadResult(objectPath, bucketName, objectName, etag); Assert.assertEquals(XML_MAPPER.writeValueAsString(completeMultipartUploadResult), result); // Verify that the temporary directory is deleted. Assert.assertFalse(mFileSystem.exists(tmpDir)); // Verify that the completed object is expected. 
try (FileInStream is = mFileSystem.openFile(new AlluxioURI(objectPath))) { String combinedObject = IOUtils.toString(is); Assert.assertEquals(expectedCombinedObject, combinedObject); } } private void createBucketRestCall(String bucketName) throws Exception { String uri = S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + bucketName; new TestCase(mHostname, mPort, uri, NO_PARAMS, HttpMethod.PUT, null, TestCaseOptions.defaults()).run(); } private HttpURLConnection deleteBucketRestCall(String bucketName) throws Exception { String uri = S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + bucketName; return new TestCase(mHostname, mPort, uri, NO_PARAMS, HttpMethod.DELETE, null, TestCaseOptions.defaults()).execute(); } private void createObjectRestCall(String objectKey, byte[] objectContent, String md5, Map<String, String> params) throws Exception { String uri = S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + objectKey; TestCaseOptions options = TestCaseOptions.defaults(); if (md5 == null) { MessageDigest md5Hash = MessageDigest.getInstance("MD5"); byte[] md5Digest = md5Hash.digest(objectContent); md5 = BaseEncoding.base64().encode(md5Digest); } options.setMD5(md5); options.setInputStream(new ByteArrayInputStream(objectContent)); new TestCase(mHostname, mPort, uri, params, HttpMethod.PUT, null, options) .run(); } private String initiateMultipartUploadRestCall(String objectKey) throws Exception { String uri = S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + objectKey; Map<String, String> params = new HashMap<>(); params.put("uploads", ""); return new TestCase(mHostname, mPort, uri, params, HttpMethod.POST, null, TestCaseOptions.defaults()).call(); } private String completeMultipartUploadRestCall(String objectKey, long uploadId) throws Exception { String uri = S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + objectKey; Map<String, String> params = new HashMap<>(); params.put("uploadId", Long.toString(uploadId)); return new TestCase(mHostname, mPort, uri, params, HttpMethod.POST, null, 
TestCaseOptions.defaults()).call(); } private HttpURLConnection abortMultipartUploadRestCall(String objectKey, long uploadId) throws Exception { String uri = S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + objectKey; Map<String, String> params = new HashMap<>(); params.put("uploadId", Long.toString(uploadId)); return new TestCase(mHostname, mPort, uri, params, HttpMethod.DELETE, null, TestCaseOptions.defaults()).execute(); } private String listPartsRestCall(String objectKey, long uploadId) throws Exception { String uri = S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + objectKey; Map<String, String> params = new HashMap<>(); params.put("uploadId", Long.toString(uploadId)); return new TestCase(mHostname, mPort, uri, params, HttpMethod.GET, null, TestCaseOptions.defaults()).call(); } private HttpURLConnection getObjectMetadataRestCall(String objectKey) throws Exception { String uri = S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + objectKey; return new TestCase(mHostname, mPort, uri, NO_PARAMS, HttpMethod.HEAD, null, TestCaseOptions.defaults()).execute(); } private String getObjectRestCall(String objectKey) throws Exception { String uri = S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + objectKey; return new TestCase(mHostname, mPort, uri, NO_PARAMS, HttpMethod.GET, null, TestCaseOptions.defaults()).call(); } private void deleteObjectRestCall(String objectKey) throws Exception { String uri = S3_SERVICE_PREFIX + AlluxioURI.SEPARATOR + objectKey; new TestCase(mHostname, mPort, uri, NO_PARAMS, HttpMethod.DELETE, null, TestCaseOptions.defaults()).run(); } }
apache-2.0
JingchengDu/hbase
hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestRepairAfterFailedDelete.java
3539
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.backup; import static org.junit.Assert.assertTrue; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.backup.impl.BackupSystemTable; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.util.ToolRunner; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; @Category(LargeTests.class) public class TestRepairAfterFailedDelete extends TestBackupBase { private static final Log LOG = LogFactory.getLog(TestRepairAfterFailedDelete.class); @Test public void testRepairBackupDelete() throws Exception { LOG.info("test repair backup delete on a single table with data"); List<TableName> tableList = Lists.newArrayList(table1); String backupId = fullTableBackup(tableList); assertTrue(checkSucceeded(backupId)); LOG.info("backup complete"); String[] backupIds = new 
String[] { backupId }; BackupSystemTable table = new BackupSystemTable(TEST_UTIL.getConnection()); BackupInfo info = table.readBackupInfo(backupId); Path path = new Path(info.getBackupRootDir(), backupId); FileSystem fs = FileSystem.get(path.toUri(), conf1); assertTrue(fs.exists(path)); // Snapshot backup system table before delete String snapshotName = "snapshot-backup"; Connection conn = TEST_UTIL.getConnection(); Admin admin = conn.getAdmin(); admin.snapshot(snapshotName, BackupSystemTable.getTableName(conf1)); int deleted = getBackupAdmin().deleteBackups(backupIds); assertTrue(!fs.exists(path)); assertTrue(fs.exists(new Path(info.getBackupRootDir()))); assertTrue(1 == deleted); // Emulate delete failure // Restore backup system table admin.disableTable(BackupSystemTable.getTableName(conf1)); admin.restoreSnapshot(snapshotName); admin.enableTable(BackupSystemTable.getTableName(conf1)); // Start backup session table.startBackupExclusiveOperation(); // Start delete operation table.startDeleteOperation(backupIds); // Now run repair command to repair "failed" delete operation String[] args = new String[] {"repair"}; // Run restore int ret = ToolRunner.run(conf1, new BackupDriver(), args); assertTrue(ret == 0); // Verify that history length == 0 assertTrue (table.getBackupHistory().size() == 0); table.close(); admin.close(); } }
apache-2.0
ptupitsyn/ignite
modules/core/src/main/java/org/apache/ignite/internal/pagemem/PageSupport.java
4477
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.pagemem;

import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.stat.IoStatisticsHolder;

/**
 * Supports operations on pages: acquire/release, and read/write locking.
 * <p>
 * Pages are addressed by a {@code (grpId, pageId)} pair; acquiring returns an
 * absolute pointer ({@code page}) that subsequent lock/unlock/release calls
 * take alongside the IDs.
 */
public interface PageSupport {
    /**
     * Gets the page absolute pointer associated with the given page ID. Each page obtained with this method must be
     * released by calling {@link #releasePage(int, long, long)}. This method will allocate page with given ID if it doesn't
     * exist.
     *
     * @param grpId Cache group ID.
     * @param pageId Page ID.
     * @return Page pointer.
     * @throws IgniteCheckedException If failed.
     */
    public long acquirePage(int grpId, long pageId) throws IgniteCheckedException;

    /**
     * Gets the page absolute pointer associated with the given page ID. Each page obtained with this method must be
     * released by calling {@link #releasePage(int, long, long)}. This method will allocate page with given ID if it
     * doesn't exist.
     *
     * @param grpId Cache group ID.
     * @param pageId Page ID.
     * @param statHolder Statistics holder to track IO operations.
     * @return Page pointer.
     * @throws IgniteCheckedException If failed.
     */
    public long acquirePage(int grpId, long pageId, IoStatisticsHolder statHolder) throws IgniteCheckedException;

    /**
     * Releases a page previously obtained via {@code acquirePage}.
     *
     * @param grpId Cache group ID.
     * @param pageId Page ID to release.
     * @param page Page pointer.
     */
    public void releasePage(int grpId, long pageId, long page);

    /**
     * Acquires a read lock on the given page.
     * <p>
     * NOTE(review): unlike {@link #readLockForce(int, long, long)} this
     * presumably validates the page tag and may fail on mismatch — confirm
     * the failure return value against the implementation.
     *
     * @param grpId Cache group ID.
     * @param pageId Page ID.
     * @param page Page pointer.
     * @return Pointer for reading the page.
     */
    public long readLock(int grpId, long pageId, long page);

    /**
     * Obtains read lock without checking page tag.
     *
     * @param grpId Cache group ID.
     * @param pageId Page ID.
     * @param page Page pointer.
     * @return Pointer for reading the page.
     */
    public long readLockForce(int grpId, long pageId, long page);

    /**
     * Releases a read lock taken with {@link #readLock(int, long, long)} or
     * {@link #readLockForce(int, long, long)}.
     *
     * @param grpId Cache group ID.
     * @param pageId Page ID.
     * @param page Page pointer.
     */
    public void readUnlock(int grpId, long pageId, long page);

    /**
     * Acquires a write lock on the given page.
     *
     * @param grpId Cache group ID.
     * @param pageId Page ID.
     * @param page Page pointer.
     * @return Address of a buffer with contents of the given page or
     * {@code 0L} if attempt to take the write lock failed.
     */
    public long writeLock(int grpId, long pageId, long page);

    /**
     * Attempts to acquire a write lock on the given page.
     * <p>
     * NOTE(review): the name suggests a non-blocking variant of
     * {@link #writeLock(int, long, long)} — confirm against the implementation.
     *
     * @param grpId Cache group ID.
     * @param pageId Page ID.
     * @param page Page pointer.
     * @return Address of a buffer with contents of the given page or
     * {@code 0L} if attempt to take the write lock failed.
     */
    public long tryWriteLock(int grpId, long pageId, long page);

    /**
     * Releases locked page.
     *
     * @param grpId Cache group ID.
     * @param pageId Page ID.
     * @param page Page pointer.
     * @param walPlc {@code True} if page should be recorded to WAL, {@code false} if the page must not
     * be recorded and {@code null} for the default behavior.
     * @param dirtyFlag Determines whether the page was modified since the last checkpoint.
     */
    public void writeUnlock(int grpId, long pageId, long page, Boolean walPlc, boolean dirtyFlag);

    /**
     * Checks whether the page has been modified since the last checkpoint.
     *
     * @param grpId Cache group ID.
     * @param pageId Page ID.
     * @param page Page pointer.
     * @return {@code True} if the page is dirty.
     */
    public boolean isDirty(int grpId, long pageId, long page);
}
apache-2.0
vivekasingh/javaspecialists-awt-event-mixins
src/main/java/eu/javaspecialists/tjsn/gui/lambda/facade/TreeExpansionListeners.java
1617
/* * Copyright (C) 2000-2014 Heinz Max Kabutz * * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. Heinz Max Kabutz licenses * this file to you under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package eu.javaspecialists.tjsn.gui.lambda.facade; import javax.swing.event.*; import java.util.function.*; public interface TreeExpansionListeners { static TreeExpansionListener forTreeExpanded(Consumer<TreeExpansionEvent> c) { return new TreeExpansionAdapter() { public void treeExpanded(TreeExpansionEvent e) { c.accept(e); } }; } static TreeExpansionListener forTreeCollapsed(Consumer<TreeExpansionEvent> c) { return new TreeExpansionAdapter() { public void treeCollapsed(TreeExpansionEvent e) { c.accept(e); } }; } interface TreeExpansionAdapter extends TreeExpansionListener { default void treeExpanded(TreeExpansionEvent e) { } default void treeCollapsed(TreeExpansionEvent e) { } } }
apache-2.0
jaysoo/openid4java-openidorgfix
src/org/openid4java/consumer/ConsumerManager.java
69800
/* * Copyright 2006-2008 Sxip Identity Corporation */ package org.openid4java.consumer; import com.google.inject.Inject; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.http.HttpStatus; import org.openid4java.OpenIDException; import org.openid4java.association.Association; import org.openid4java.association.AssociationException; import org.openid4java.association.AssociationSessionType; import org.openid4java.association.DiffieHellmanSession; import org.openid4java.discovery.Discovery; import org.openid4java.discovery.DiscoveryException; import org.openid4java.discovery.DiscoveryInformation; import org.openid4java.discovery.Identifier; import org.openid4java.discovery.yadis.YadisResolver; import org.openid4java.message.AssociationError; import org.openid4java.message.AssociationRequest; import org.openid4java.message.AssociationResponse; import org.openid4java.message.AuthFailure; import org.openid4java.message.AuthImmediateFailure; import org.openid4java.message.AuthRequest; import org.openid4java.message.AuthSuccess; import org.openid4java.message.DirectError; import org.openid4java.message.Message; import org.openid4java.message.MessageException; import org.openid4java.message.ParameterList; import org.openid4java.message.VerifyRequest; import org.openid4java.message.VerifyResponse; import org.openid4java.server.IncrementalNonceGenerator; import org.openid4java.server.NonceGenerator; import org.openid4java.server.RealmVerifier; import org.openid4java.server.RealmVerifierFactory; import org.openid4java.util.HttpFetcher; import org.openid4java.util.HttpFetcherFactory; import org.openid4java.util.HttpRequestOptions; import org.openid4java.util.HttpResponse; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URL; import java.net.URLDecoder; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Arrays; import 
java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Stack; import javax.crypto.spec.DHParameterSpec; /** * Manages OpenID communications with an OpenID Provider (Server). * <p> * The Consumer site needs to have the same instance of this class throughout * the lifecycle of a OpenID authentication session. * * @author Marius Scurtescu, Johnny Bufu */ public class ConsumerManager { private static Log _log = LogFactory.getLog(ConsumerManager.class); private static final boolean DEBUG = _log.isDebugEnabled(); /** * Discovery process manager. */ private Discovery _discovery; /** * Direct pointer to HttpFetcher, for association and signature * verification requests. */ private HttpFetcher _httpFetcher; /** * Store for keeping track of the established associations. */ private ConsumerAssociationStore _associations = new InMemoryConsumerAssociationStore(); /** * Consumer-side nonce generator, needed for compatibility with OpenID 1.1. */ private NonceGenerator _consumerNonceGenerator = new IncrementalNonceGenerator(); /** * Private association store used for signing consumer nonces when operating * in compatibility (v1.x) mode. */ private ConsumerAssociationStore _privateAssociations = new InMemoryConsumerAssociationStore(); /** * Verifier for the nonces in authentication responses; * prevents replay attacks. */ private NonceVerifier _nonceVerifier = new InMemoryNonceVerifier(60); // --- association preferences --- /** * Maximum number of attmpts for establishing an association. */ private int _maxAssocAttempts = 4; /** * Flag for enabling or disabling stateless mode. */ private boolean _allowStateless = true; /** * The lowest encryption level session accepted for association sessions. */ private AssociationSessionType _minAssocSessEnc = AssociationSessionType.NO_ENCRYPTION_SHA1MAC; /** * The preferred association session type; will be attempted first. 
*/ private AssociationSessionType _prefAssocSessEnc; /** * Parameters (modulus and generator) for the Diffie-Hellman sessions. */ private DHParameterSpec _dhParams = DiffieHellmanSession.getDefaultParameter(); /** * Timeout (in seconds) for keeping track of failed association attempts. * Default 5 minutes. */ private int _failedAssocExpire = 300; /** * Interval before the expiration of an association (in seconds) * in which the association should not be used, in order to avoid * the expiration from occurring in the middle of an authentication * transaction. Default: 300s. */ private int _preExpiryAssocLockInterval = 300; // --- authentication preferences --- /** * Flag for generating checkid_immediate authentication requests. */ private boolean _immediateAuth = false; /** * Used to perform verify realms against return_to URLs. */ private RealmVerifier _realmVerifier; /** * Instantiates a ConsumerManager with default settings. */ public ConsumerManager() { this( new RealmVerifierFactory(new YadisResolver(new HttpFetcherFactory())), new Discovery(), // uses HttpCache internally new HttpFetcherFactory()); } @Inject public ConsumerManager(RealmVerifierFactory realmFactory, Discovery discovery, HttpFetcherFactory httpFetcherFactory) { _realmVerifier = realmFactory.getRealmVerifierForConsumer(); // don't verify own (RP) identity, disable RP discovery _realmVerifier.setEnforceRpId(false); _discovery = discovery; _httpFetcher = httpFetcherFactory.createFetcher(HttpRequestOptions.getDefaultOptionsForOpCalls()); if (Association.isHmacSha256Supported()) _prefAssocSessEnc = AssociationSessionType.DH_SHA256; else _prefAssocSessEnc = AssociationSessionType.DH_SHA1; } /** * Returns discovery process manager. * * @return discovery process manager. */ public Discovery getDiscovery() { return _discovery; } /** * Sets discovery process manager. * * @param discovery discovery process manager. 
*/ public void setDiscovery(Discovery discovery) { _discovery = discovery; } /** * Gets the association store that holds established associations with * OpenID providers. * * @see ConsumerAssociationStore */ public ConsumerAssociationStore getAssociations() { return _associations; } /** * Configures the ConsumerAssociationStore that will be used to store the * associations established with OpenID providers. * * @param associations ConsumerAssociationStore implementation * @see ConsumerAssociationStore */ @Inject public void setAssociations(ConsumerAssociationStore associations) { this._associations = associations; } /** * Gets the NonceVerifier implementation used to keep track of the nonces * that have been seen in authentication response messages. * * @see NonceVerifier */ public NonceVerifier getNonceVerifier() { return _nonceVerifier; } /** * Configures the NonceVerifier that will be used to keep track of the * nonces in the authentication response messages. * * @param nonceVerifier NonceVerifier implementation * @see NonceVerifier */ @Inject public void setNonceVerifier(NonceVerifier nonceVerifier) { this._nonceVerifier = nonceVerifier; } /** * Sets the Diffie-Hellman base parameters that will be used for encoding * the MAC key exchange. * <p> * If not provided the default set specified by the Diffie-Hellman algorithm * will be used. * * @param dhParams Object encapsulating modulus and generator numbers * @see DHParameterSpec DiffieHellmanSession */ public void setDHParams(DHParameterSpec dhParams) { this._dhParams = dhParams; } /** * Gets the Diffie-Hellman base parameters (modulus and generator). * * @see DHParameterSpec DiffieHellmanSession */ public DHParameterSpec getDHParams() { return _dhParams; } /** * Maximum number of attempts (HTTP calls) the RP is willing to make * for trying to establish an association with the OP. * * Default: 4; * 0 = don't use associations * * Associations and stateless mode cannot be both disabled at the same time. 
*/ public void setMaxAssocAttempts(int maxAssocAttempts) { if (maxAssocAttempts > 0 || _allowStateless) this._maxAssocAttempts = maxAssocAttempts; else throw new IllegalArgumentException( "Associations and stateless mode " + "cannot be both disabled at the same time."); if (_maxAssocAttempts == 0) _log.info("Associations disabled."); } /** * Gets the value configured for the maximum number of association attempts * that will be performed for a given OpenID provider. * <p> * If an association cannot be established after this number of attempts the * ConsumerManager will fallback to stateless mode, provided the * #allowStateless preference is enabled. * <p> * See also: {@link #allowStateless(boolean)} {@link #statelessAllowed()} */ public int getMaxAssocAttempts() { return _maxAssocAttempts; } /** * Flag used to enable / disable the use of stateless mode. * <p> * Default: enabled. * <p> * Associations and stateless mode cannot be both disabled at the same time. * @deprecated * @see #setAllowStateless(boolean) */ public void allowStateless(boolean allowStateless) { setAllowStateless(allowStateless); } /** * Flag used to enable / disable the use of stateless mode. * <p> * Default: enabled. * <p> * Associations and stateless mode cannot be both disabled at the same time. */ public void setAllowStateless(boolean allowStateless) { if (_allowStateless || _maxAssocAttempts > 0) this._allowStateless = allowStateless; else throw new IllegalArgumentException( "Associations and stateless mode " + "cannot be both disabled at the same time."); } /** * Returns true if the ConsumerManager is configured to fallback to * stateless mode when failing to associate with an OpenID Provider. * * @deprecated * @see #isAllowStateless() */ public boolean statelessAllowed() { return _allowStateless; } /** * Returns true if the ConsumerManager is configured to fallback to * stateless mode when failing to associate with an OpenID Provider. 
*/ public boolean isAllowStateless() { return _allowStateless; } /** * Configures the minimum level of encryption accepted for association * sessions. * <p> * Default: no-encryption session, SHA1 MAC association. * <p> * See also: {@link #allowStateless(boolean)} */ public void setMinAssocSessEnc(AssociationSessionType minAssocSessEnc) { this._minAssocSessEnc = minAssocSessEnc; } /** * Gets the minimum level of encryption that will be accepted for * association sessions. * <p> * Default: no-encryption session, SHA1 MAC association * <p> */ public AssociationSessionType getMinAssocSessEnc() { return _minAssocSessEnc; } /** * Sets the preferred encryption type for the association sessions. * <p> * Default: DH-SHA256 */ public void setPrefAssocSessEnc(AssociationSessionType prefAssocSessEnc) { this._prefAssocSessEnc = prefAssocSessEnc; } /** * Gets the preferred encryption type for the association sessions. */ public AssociationSessionType getPrefAssocSessEnc() { return _prefAssocSessEnc; } /** * Sets the expiration timeout (in seconds) for keeping track of failed * association attempts. * <p> * If an association cannot be establish with an OP, subsequesnt * authentication request to that OP will not try to establish an * association within the timeout period configured here. * <p> * Default: 300s * 0 = disabled (attempt to establish an association with every * authentication request) * * @param _failedAssocExpire time in seconds to remember failed * association attempts */ public void setFailedAssocExpire(int _failedAssocExpire) { this._failedAssocExpire = _failedAssocExpire; } /** * Gets the timeout (in seconds) configured for keeping track of failed * association attempts. 
* <p> * See also: {@link #setFailedAssocExpire(int)} */ public int getFailedAssocExpire() { return _failedAssocExpire; } /** * Gets the interval before the expiration of an association * (in seconds) in which the association should not be used, * in order to avoid the expiration from occurring in the middle * of a authentication transaction. Default: 300s. */ public int getPreExpiryAssocLockInterval() { return _preExpiryAssocLockInterval; } /** * Sets the interval before the expiration of an association * (in seconds) in which the association should not be used, * in order to avoid the expiration from occurring in the middle * of a authentication transaction. Default: 300s. * * @param preExpiryAssocLockInterval The number of seconds for the * pre-expiry lock inteval. */ public void setPreExpiryAssocLockInterval(int preExpiryAssocLockInterval) { this._preExpiryAssocLockInterval = preExpiryAssocLockInterval; } /** * Configures the authentication request mode: * checkid_immediate (true) or checkid_setup (false). * <p> * Default: false / checkid_setup */ public void setImmediateAuth(boolean _immediateAuth) { this._immediateAuth = _immediateAuth; } /** * Returns true if the ConsumerManager is configured to attempt * checkid_immediate authentication requests. * <p> * Default: false */ public boolean isImmediateAuth() { return _immediateAuth; } /** * Gets the RealmVerifier used to verify realms against return_to URLs. */ public RealmVerifier getRealmVerifier() { return _realmVerifier; } /** * Sets the RealmVerifier used to verify realms against return_to URLs. */ public void setRealmVerifier(RealmVerifier realmVerifier) { this._realmVerifier = realmVerifier; } /** * Gets the max age (in seconds) configured for keeping track of nonces. * <p> * Nonces older than the max age will be removed from the store and * authentication responses will be considered failures. 
*/ public int getMaxNonceAge() { return _nonceVerifier.getMaxAge(); } /** * Sets the max age (in seconds) configured for keeping track of nonces. * <p> * Nonces older than the max age will be removed from the store and * authentication responses will be considered failures. */ public void setMaxNonceAge(int ageSeconds) { _nonceVerifier.setMaxAge(ageSeconds); } /** * Does discovery on an identifier. It delegates the call to its * discovery manager. * * @return A List of {@link DiscoveryInformation} objects. * The list could be empty if no discovery information can * be retrieved. * * @throws DiscoveryException if the discovery process runs into errors. */ public List discover(String identifier) throws DiscoveryException { return _discovery.discover(identifier); } /** * Configures a private association store for signing consumer nonces. * <p> * Consumer nonces are needed to prevent replay attacks in compatibility * mode, because OpenID 1.x Providers to not attach nonces to * authentication responses. * <p> * One way for the Consumer to know that a consumer nonce in an * authentication response was indeed issued by itself (and thus prevent * denial of service attacks), is by signing them. * * @param associations The association store to be used for signing consumer nonces; * signing can be deactivated by setting this to null. * Signing is enabled by default. */ public void setPrivateAssociationStore(ConsumerAssociationStore associations) throws ConsumerException { if (associations == null) throw new ConsumerException( "Cannot set null private association store, " + "needed for consumer nonces."); _privateAssociations = associations; } /** * Gets the private association store used for signing consumer nonces. 
* * @see #setPrivateAssociationStore(ConsumerAssociationStore) */ public ConsumerAssociationStore getPrivateAssociationStore() { return _privateAssociations; } public void setConnectTimeout(int connectTimeout) { _httpFetcher.getDefaultRequestOptions() .setConnTimeout(connectTimeout); } public void setSocketTimeout(int socketTimeout) { _httpFetcher.getDefaultRequestOptions() .setSocketTimeout(socketTimeout); } public void setMaxRedirects(int maxRedirects) { _httpFetcher.getDefaultRequestOptions() .setMaxRedirects(maxRedirects); } /** * Makes a HTTP call to the specified URL with the parameters specified * in the Message. * * @param url URL endpoint for the HTTP call * @param request Message containing the parameters * @param response ParameterList that will hold the parameters received in * the HTTP response * @return the status code of the HTTP call */ private int call(String url, Message request, ParameterList response) throws MessageException { int responseCode = -1; try { if (DEBUG) _log.debug("Performing HTTP POST on " + url); HttpResponse resp = _httpFetcher.post(url, request.getParameterMap()); responseCode = resp.getStatusCode(); String postResponse = resp.getBody(); response.copyOf(ParameterList.createFromKeyValueForm(postResponse)); if (DEBUG) _log.debug("Retrived response:\n" + postResponse); } catch (IOException e) { _log.error("Error talking to " + url + " response code: " + responseCode, e); } return responseCode; } /** * Tries to establish an association with on of the service endpoints in * the list of DiscoveryInformation. * <p> * Iterates over the items in the discoveries parameter a maximum of * #_maxAssocAttempts times trying to esablish an association. * * @param discoveries The DiscoveryInformation list obtained by * performing dicovery on the User-supplied OpenID * identifier. Should be ordered by the priority * of the service endpoints. 
* @return The DiscoveryInformation instance with which * an association was established, or the one * with the highest priority if association failed. * * @see Discovery#discover(org.openid4java.discovery.Identifier) */ public DiscoveryInformation associate(List discoveries) { DiscoveryInformation discovered; Association assoc; int attemptsLeft = _maxAssocAttempts; Iterator itr = discoveries.iterator(); while (itr.hasNext() && attemptsLeft > 0) { discovered = (DiscoveryInformation) itr.next(); attemptsLeft -= associate(discovered, attemptsLeft); // check if an association was established assoc = _associations.load(discovered.getOPEndpoint().toString()); if ( assoc != null && ! Association.FAILED_ASSOC_HANDLE.equals(assoc.getHandle())) return discovered; } if (discoveries.size() > 0) { // no association established, return the first service endpoint DiscoveryInformation d0 = (DiscoveryInformation) discoveries.get(0); _log.warn("Association failed; using first entry: " + d0.getOPEndpoint()); return d0; } else { _log.error("Association attempt, but no discovey endpoints provided."); return null; } } /** * Tries to establish an association with the OpenID Provider. * <p> * The resulting association information will be kept on storage for later * use at verification stage. If there exists an association for the opUrl * that is not near expiration, will not construct new association. * * @param discovered DiscoveryInformation obtained during the discovery * @return The number of association attempts performed. 
*/ private int associate(DiscoveryInformation discovered, int maxAttempts) { if (_maxAssocAttempts == 0) return 0; // associations disabled URL opUrl = discovered.getOPEndpoint(); String opEndpoint = opUrl.toString(); _log.info("Trying to associate with " + opEndpoint + " attempts left: " + maxAttempts); // check if there's an already established association Association a = _associations.load(opEndpoint); if ( a != null && (Association.FAILED_ASSOC_HANDLE.equals(a.getHandle()) || a.getExpiry().getTime() - System.currentTimeMillis() > _preExpiryAssocLockInterval * 1000) ) { _log.info("Found an existing association: " + a.getHandle()); return 0; } String handle = Association.FAILED_ASSOC_HANDLE; // build a list of association types, with the preferred one at the end LinkedHashMap requests = new LinkedHashMap(); if (discovered.isVersion2()) { requests.put(AssociationSessionType.NO_ENCRYPTION_SHA1MAC, null); requests.put(AssociationSessionType.NO_ENCRYPTION_SHA256MAC, null); requests.put(AssociationSessionType.DH_SHA1, null); requests.put(AssociationSessionType.DH_SHA256, null); } else { requests.put(AssociationSessionType.NO_ENCRYPTION_COMPAT_SHA1MAC, null); requests.put(AssociationSessionType.DH_COMPAT_SHA1, null); } if (_prefAssocSessEnc.isVersion2() == discovered.isVersion2()) requests.put(_prefAssocSessEnc, null); // build a stack of Association Request objects // and keep only the allowed by the configured preferences // the most-desirable entry is always at the top of the stack Stack reqStack = new Stack(); Iterator iter = requests.keySet().iterator(); while(iter.hasNext()) { AssociationSessionType type = (AssociationSessionType) iter.next(); // create the appropriate Association Request AssociationRequest newReq = createAssociationRequest(type, opUrl); if (newReq != null) reqStack.push(newReq); } // perform the association attempts int attemptsLeft = maxAttempts; LinkedHashMap alreadyTried = new LinkedHashMap(); while (attemptsLeft > 0 && ! 
reqStack.empty()) { try { attemptsLeft--; AssociationRequest assocReq = (AssociationRequest) reqStack.pop(); if (DEBUG) _log.debug("Trying association type: " + assocReq.getType()); // was this association / session type attempted already? if (alreadyTried.keySet().contains(assocReq.getType())) { if (DEBUG) _log.debug("Already tried."); continue; } // mark the current request type as already tried alreadyTried.put(assocReq.getType(), null); ParameterList respParams = new ParameterList(); int status = call(opEndpoint, assocReq, respParams); // process the response if (status == HttpStatus.SC_OK) // success response { AssociationResponse assocResp; assocResp = AssociationResponse .createAssociationResponse(respParams); // valid association response Association assoc = assocResp.getAssociation(assocReq.getDHSess()); handle = assoc.getHandle(); AssociationSessionType respType = assocResp.getType(); if ( respType.equals(assocReq.getType()) || // v1 OPs may return a success no-encryption resp ( ! 
discovered.isVersion2() && respType.getHAlgorithm() == null && createAssociationRequest(respType,opUrl) != null)) { // store the association and do no try alternatives _associations.save(opEndpoint, assoc); _log.info("Associated with " + discovered.getOPEndpoint() + " handle: " + assoc.getHandle()); break; } else _log.info("Discarding association response, " + "not matching consumer criteria"); } else if (status == HttpStatus.SC_BAD_REQUEST) // error response { _log.info("Association attempt failed."); // retrieve fallback sess/assoc/encryption params set by OP // and queue a new attempt AssociationError assocErr = AssociationError.createAssociationError(respParams); AssociationSessionType opType = AssociationSessionType.create( assocErr.getSessionType(), assocErr.getAssocType()); if (alreadyTried.keySet().contains(opType)) continue; // create the appropriate Association Request AssociationRequest newReq = createAssociationRequest(opType, opUrl); if (newReq != null) { if (DEBUG) _log.debug("Retrieved association type " + "from the association error: " + newReq.getType()); reqStack.push(newReq); } } } catch (OpenIDException e) { _log.error("Error encountered during association attempt.", e); } } // store OPs with which an association could not be established // so that association attempts are not performed with each auth request if (Association.FAILED_ASSOC_HANDLE.equals(handle) && _failedAssocExpire > 0) _associations.save(opEndpoint, Association.getFailedAssociation(_failedAssocExpire)); return maxAttempts - attemptsLeft; } /** * Constructs an Association Request message of the specified session and * association type, taking into account the user preferences (encryption * level, default Diffie-Hellman parameters). 
* * @param type The type of the association (session and association) * @param opUrl The OP for which the association request is created * @return An AssociationRequest message ready to be sent back * to the OpenID Provider, or null if an association * of the requested type cannot be built. */ private AssociationRequest createAssociationRequest( AssociationSessionType type, URL opUrl) { try { if (_minAssocSessEnc.isBetter(type)) return null; AssociationRequest assocReq = null; DiffieHellmanSession dhSess; if (type.getHAlgorithm() != null) // DH session { dhSess = DiffieHellmanSession.create(type, _dhParams); if (DiffieHellmanSession.isDhSupported(type) && Association.isHmacSupported(type.getAssociationType())) assocReq = AssociationRequest.createAssociationRequest(type, dhSess); } else if ( opUrl.getProtocol().equals("https") && // no-enc sess Association.isHmacSupported(type.getAssociationType())) assocReq = AssociationRequest.createAssociationRequest(type); if (assocReq == null) _log.warn("Could not create association of type: " + type); return assocReq; } catch (OpenIDException e) { _log.error("Error trying to create association request.", e); return null; } } /** * Builds a authentication request message for the user specified in the * discovery information provided as a parameter. * <p> * If the discoveries parameter contains more than one entry, it will * iterate over them trying to establish an association. If an association * cannot be established, the first entry is used with stateless mode. * * @see #associate(java.util.List) * @param discoveries The DiscoveryInformation list obtained by * performing dicovery on the User-supplied OpenID * identifier. Should be ordered by the priority * of the service endpoints. * @param returnToUrl The URL on the Consumer site where the OpenID * Provider will return the user after generating * the authentication response. 
     <br>
     *                          Null if the Consumer does not wish for the
     *                          End User to be returned to it (something else
     *                          useful will have been performed via an
     *                          extension). <br>
     *                          Must not be null in OpenID 1.x compatibility
     *                          mode.
     * @return                  Authentication request message to be sent to the
     *                          OpenID Provider.
     */
    public AuthRequest authenticate(List discoveries,
                                    String returnToUrl)
            throws ConsumerException, MessageException
    {
        // delegate: with no explicit realm, the return_to URL doubles as realm
        return authenticate(discoveries, returnToUrl, returnToUrl);
    }

    /**
     * Builds an authentication request message for the user specified in the
     * discovery information provided as a parameter.
     * <p>
     * If the discoveries parameter contains more than one entry, it will
     * iterate over them trying to establish an association. If an association
     * cannot be established, the first entry is used with stateless mode.
     *
     * @see #associate(java.util.List)
     * @param discoveries       The DiscoveryInformation list obtained by
     *                          performing discovery on the User-supplied OpenID
     *                          identifier. Should be ordered by the priority
     *                          of the service endpoints.
     * @param returnToUrl       The URL on the Consumer site where the OpenID
     *                          Provider will return the user after generating
     *                          the authentication response. <br>
     *                          Null if the Consumer does not wish for the
     *                          End User to be returned to it (something else
     *                          useful will have been performed via an
     *                          extension). <br>
     *                          Must not be null in OpenID 1.x compatibility
     *                          mode.
     * @param realm             The URL pattern that will be presented to the
     *                          user when he/she will be asked to authorize the
     *                          authentication transaction. Must be a super-set
     *                          of the @returnToUrl.
     * @return                  Authentication request message to be sent to the
     *                          OpenID Provider.
     */
    public AuthRequest authenticate(List discoveries,
                                    String returnToUrl, String realm)
            throws ConsumerException, MessageException
    {
        // try to associate with one OP in the discovered list
        DiscoveryInformation discovered = associate(discoveries);

        return authenticate(discovered, returnToUrl, realm);
    }

    /**
     * Builds an authentication request message for the user specified in the
     * discovery information provided as a parameter.
     *
     * @param discovered        A DiscoveryInformation endpoint from the list
     *                          obtained by performing discovery on the
     *                          User-supplied OpenID identifier.
     * @param returnToUrl       The URL on the Consumer site where the OpenID
     *                          Provider will return the user after generating
     *                          the authentication response. <br>
     *                          Null if the Consumer does not wish for the
     *                          End User to be returned to it (something else
     *                          useful will have been performed via an
     *                          extension). <br>
     *                          Must not be null in OpenID 1.x compatibility
     *                          mode.
     * @return                  Authentication request message to be sent to the
     *                          OpenID Provider.
     */
    public AuthRequest authenticate(DiscoveryInformation discovered,
                                    String returnToUrl)
            throws MessageException, ConsumerException
    {
        // delegate: with no explicit realm, the return_to URL doubles as realm
        return authenticate(discovered, returnToUrl, returnToUrl);
    }

    /**
     * Builds an authentication request message for the user specified in the
     * discovery information provided as a parameter.
     *
     * @param discovered        A DiscoveryInformation endpoint from the list
     *                          obtained by performing discovery on the
     *                          User-supplied OpenID identifier.
     * @param returnToUrl       The URL on the Consumer site where the OpenID
     *                          Provider will return the user after generating
     *                          the authentication response. <br>
     *                          Null if the Consumer does not wish for the
     *                          End User to be returned to it (something else
     *                          useful will have been performed via an
     *                          extension). <br>
     *                          Must not be null in OpenID 1.x compatibility
     *                          mode.
     * @param realm             The URL pattern that will be presented to the
     *                          user when he/she will be asked to authorize the
     *                          authentication transaction.
     Must be a super-set
     *                          of the @returnToUrl.
     * @return                  Authentication request message to be sent to the
     *                          OpenID Provider.
     */
    public AuthRequest authenticate(DiscoveryInformation discovered,
                                    String returnToUrl, String realm)
            throws MessageException, ConsumerException
    {
        if (discovered == null)
            throw new ConsumerException("Authentication cannot continue: " +
                    "no discovery information provided.");

        // reuse an existing association with this OP endpoint if possible
        Association assoc =
                _associations.load(discovered.getOPEndpoint().toString());

        if (assoc == null)
        {
            // no cached association; try to establish one now
            associate(discovered, _maxAssocAttempts);
            assoc = _associations.load(discovered.getOPEndpoint().toString());
        }

        // FAILED_ASSOC_HANDLE marks stateless operation
        String handle = assoc != null ?
                assoc.getHandle() : Association.FAILED_ASSOC_HANDLE;

        // get the Claimed ID and Delegate ID (aka OP-specific identifier)
        String claimedId, delegate;
        if (discovered.hasClaimedIdentifier())
        {
            claimedId = discovered.getClaimedIdentifier().getIdentifier();
            delegate = discovered.hasDelegateIdentifier() ?
                       discovered.getDelegateIdentifier() : claimedId;
        }
        else
        {
            // OP-directed identifier selection (OpenID 2.0 "identifier_select")
            claimedId = AuthRequest.SELECT_ID;
            delegate = AuthRequest.SELECT_ID;
        }

        // stateless mode disabled ?
        if ( !_allowStateless && Association.FAILED_ASSOC_HANDLE.equals(handle))
            throw new ConsumerException("Authentication cannot be performed: " +
                    "no association available and stateless mode is disabled");

        _log.info("Creating authentication request for" +
                  " OP-endpoint: " + discovered.getOPEndpoint() +
                  " claimedID: " + claimedId +
                  " OP-specific ID: " + delegate);

        // v1 OPs issue no response nonce; embed our own in return_to
        if (! discovered.isVersion2())
            returnToUrl = insertConsumerNonce(discovered.getOPEndpoint().toString(), returnToUrl);

        AuthRequest authReq = AuthRequest.createAuthRequest(claimedId, delegate,
                ! discovered.isVersion2(), returnToUrl, handle, realm, _realmVerifier);

        authReq.setOPEndpoint(discovered.getOPEndpoint());

        // ignore the immediate flag for OP-directed identifier selection
        if (! AuthRequest.SELECT_ID.equals(claimedId))
            authReq.setImmediate(_immediateAuth);

        return authReq;
    }

    /**
     * Performs verification on the Authentication Response (assertion)
     * received from the OpenID Provider.
     * <p>
     * Three verification steps are performed:
     * <ul>
     * <li> nonce:                  the same assertion will not be accepted more
     *                              than once
     * <li> signatures:             verifies that the message was indeed sent
     *                              by the OpenID Provider that was contacted
     *                              earlier after discovery
     * <li> discovered information: the information contained in the assertion
     *                              matches the one obtained during the
     *                              discovery (the OpenID Provider is
     *                              authoritative for the claimed identifier;
     *                              the received assertion is not meaningful
     *                              otherwise
     * </ul>
     *
     * @param receivingUrl  The URL where the Consumer (Relying Party) has
     *                      accepted the incoming message.
     * @param response      ParameterList of the authentication response
     *                      being verified.
     * @param discovered    Previously discovered information (which can
     *                      therefore be trusted) obtained during the discovery
     *                      phase; this should be stored and retrieved by the RP
     *                      in the user's session.
     *
     * @return              A VerificationResult, containing a verified
     *                      identifier; the verified identifier is null if
     *                      the verification failed).
*/ public VerificationResult verify(String receivingUrl, ParameterList response, DiscoveryInformation discovered) throws MessageException, DiscoveryException, AssociationException { VerificationResult result = new VerificationResult(); _log.info("Verifying authentication response..."); // non-immediate negative response if ( "cancel".equals(response.getParameterValue("openid.mode")) ) { result.setAuthResponse(AuthFailure.createAuthFailure(response)); _log.info("Received auth failure."); return result; } // immediate negative response if ( "setup_needed".equals(response.getParameterValue("openid.mode")) || ("id_res".equals(response.getParameterValue("openid.mode")) && response.hasParameter("openid.user_setup_url") ) ) { AuthImmediateFailure fail = AuthImmediateFailure.createAuthImmediateFailure(response); result.setAuthResponse(fail); result.setOPSetupUrl(fail.getUserSetupUrl()); _log.info("Received auth immediate failure."); return result; } AuthSuccess authResp = AuthSuccess.createAuthSuccess(response); _log.info("Received positive auth response."); authResp.validate(); result.setAuthResponse(authResp); // [1/4] return_to verification if (! verifyReturnTo(receivingUrl, authResp)) { result.setStatusMsg("Return_To URL verification failed."); _log.error("Return_To URL verification failed."); return result; } // [2/4] : discovered info verification discovered = verifyDiscovered(authResp, discovered); if (discovered == null || ! discovered.hasClaimedIdentifier()) { result.setStatusMsg("Discovered information verification failed."); _log.error("Discovered information verification failed."); return result; } // [3/4] : nonce verification if (! 
verifyNonce(authResp, discovered)) { result.setStatusMsg("Nonce verification failed."); _log.error("Nonce verification failed."); return result; } // [4/4] : signature verification return (verifySignature(authResp, discovered, result)); } /** * Verifies that the URL where the Consumer (Relying Party) received the * authentication response matches the value of the "openid.return_to" * parameter in the authentication response. * * @param receivingUrl The URL where the Consumer received the * authentication response. * @param response The authentication response. * @return True if the two URLs match, false otherwise. */ public boolean verifyReturnTo(String receivingUrl, AuthSuccess response) { if (DEBUG) _log.debug("Verifying return URL; receiving: " + receivingUrl + "\nmessage: " + response.getReturnTo()); URL receiving; URL returnTo; try { receiving = new URL(receivingUrl); returnTo = new URL(response.getReturnTo()); } catch (MalformedURLException e) { _log.error("Invalid return URL.", e); return false; } // [1/2] schema, authority (includes port) and path // deal manually with the trailing slash in the path StringBuffer receivingPath = new StringBuffer(receiving.getPath()); if ( receivingPath.length() > 0 && receivingPath.charAt(receivingPath.length() -1) != '/') receivingPath.append('/'); StringBuffer returnToPath = new StringBuffer(returnTo.getPath()); if ( returnToPath.length() > 0 && returnToPath.charAt(returnToPath.length() -1) != '/') returnToPath.append('/'); if ( ! receiving.getProtocol().equals(returnTo.getProtocol()) || ! receiving.getAuthority().equals(returnTo.getAuthority()) || ! 
receivingPath.toString().equals(returnToPath.toString()) ) { if (DEBUG) _log.debug("Return URL schema, authority or " + "path verification failed."); return false; } // [2/2] query parameters try { Map returnToParams = extractQueryParams(returnTo); Map receivingParams = extractQueryParams(receiving); if (returnToParams == null) return true; if (receivingParams == null) { if (DEBUG) _log.debug("Return URL query parameters verification failed."); return false; } Iterator iter = returnToParams.keySet().iterator(); while (iter.hasNext()) { String key = (String) iter.next(); List receivingValues = (List) receivingParams.get(key); List returnToValues = (List) returnToParams.get(key); if ( receivingValues == null || receivingValues.size() != returnToValues.size() || ! receivingValues.containsAll( returnToValues ) ) { if (DEBUG) _log.debug("Return URL query parameters verification failed."); return false; } } } catch (UnsupportedEncodingException e) { _log.error("Error verifying return URL query parameters.", e); return false; } return true; } /** * Returns a Map(key, List(values)) with the URL's query params, or null if * the URL doesn't have a query string. */ public Map extractQueryParams(URL url) throws UnsupportedEncodingException { if (url.getQuery() == null) return null; Map paramsMap = new HashMap(); List paramList = Arrays.asList(url.getQuery().split("&")); Iterator iter = paramList.iterator(); while (iter.hasNext()) { String keyValue = (String) iter.next(); int equalPos = keyValue.indexOf("="); String key = equalPos > -1 ? 
URLDecoder.decode(keyValue.substring(0, equalPos), "UTF-8") : URLDecoder.decode(keyValue, "UTF-8"); String value; if (equalPos <= -1) value = null; else if (equalPos + 1 > keyValue.length()) value = ""; else value = URLDecoder.decode(keyValue.substring(equalPos + 1), "UTF-8"); List existingValues = (List) paramsMap.get(key); if (existingValues == null) { List newValues = new ArrayList(); newValues.add(value); paramsMap.put(key, newValues); } else existingValues.add(value); } return paramsMap; } /** * Verifies the nonce in an authentication response. * * @param authResp The authentication response containing the nonce * to be verified. * @param discovered The discovery information associated with the * authentication transaction. * @return True if the nonce is valid, false otherwise. */ public boolean verifyNonce(AuthSuccess authResp, DiscoveryInformation discovered) { String nonce = authResp.getNonce(); if (nonce == null) // compatibility mode nonce = extractConsumerNonce(authResp.getReturnTo(), discovered.getOPEndpoint().toString()); if (nonce == null) return false; // using the same nonce verifier for both server and consumer nonces return (NonceVerifier.OK == _nonceVerifier.seen( discovered.getOPEndpoint().toString(), nonce)); } /** * Inserts a consumer-side nonce as a custom parameter in the return_to * parameter of the authentication request. * <p> * Needed for preventing replay attack when running compatibility mode. * OpenID 1.1 OpenID Providers do not generate nonces in authentication * responses. * * @param opUrl The endpoint to be used for private association. * @param returnTo The return_to URL to which a custom nonce * parameter will be added. * @return The return_to URL containing the nonce. */ public String insertConsumerNonce(String opUrl, String returnTo) { String nonce = _consumerNonceGenerator.next(); returnTo += (returnTo.indexOf('?') != -1) ? 
'&' : '?'; Association privateAssoc = _privateAssociations.load(opUrl); if( privateAssoc == null ) { try { if (DEBUG) _log.debug( "Creating private association for opUrl " + opUrl); privateAssoc = Association.generate( getPrefAssocSessEnc().getAssociationType(), "", _failedAssocExpire); _privateAssociations.save( opUrl, privateAssoc ); } catch ( AssociationException e ) { _log.error("Cannot initialize private association.", e); return null; } } try { returnTo += "openid.rpnonce=" + URLEncoder.encode(nonce, "UTF-8"); returnTo += "&openid.rpsig=" + URLEncoder.encode(privateAssoc.sign(returnTo), "UTF-8"); _log.info("Inserted consumer nonce: " + nonce); if (DEBUG) _log.debug("return_to:" + returnTo); } catch (Exception e) { _log.error("Error inserting consumre nonce.", e); return null; } return returnTo; } /** * Extracts the consumer-side nonce from the return_to parameter in * authentication response from a OpenID 1.1 Provider. * * @param returnTo return_to URL from the authentication response * @param opUrl URL for the appropriate OP endpoint * @return The nonce found in the return_to URL, or null if * it wasn't found. 
*/ public String extractConsumerNonce(String returnTo, String opUrl) { if (DEBUG) _log.debug("Extracting consumer nonce..."); String nonce = null; String signature = null; URL returnToUrl; try { returnToUrl = new URL(returnTo); } catch (MalformedURLException e) { _log.error("Invalid return_to: " + returnTo, e); return null; } String query = returnToUrl.getQuery(); String[] params = query.split("&"); for (int i=0; i < params.length; i++) { String keyVal[] = params[i].split("=", 2); try { if (keyVal.length == 2 && "openid.rpnonce".equals(keyVal[0])) { nonce = URLDecoder.decode(keyVal[1], "UTF-8"); if (DEBUG) _log.debug("Extracted consumer nonce: " + nonce); } if (keyVal.length == 2 && "openid.rpsig".equals(keyVal[0])) { signature = URLDecoder.decode(keyVal[1], "UTF-8"); if (DEBUG) _log.debug("Extracted consumer nonce signature: " + signature); } } catch (UnsupportedEncodingException e) { _log.error("Error extracting consumer nonce / signarure.", e); return null; } } // check the signature if (signature == null) { _log.error("Null consumer nonce signature."); return null; } String signed = returnTo.substring(0, returnTo.indexOf("&openid.rpsig=")); if (DEBUG) _log.debug("Consumer signed text:\n" + signed); try { if (DEBUG) _log.debug( "Loading private association for opUrl " + opUrl ); Association privateAssoc = _privateAssociations.load(opUrl); if( privateAssoc == null ) { _log.error("Null private association."); return null; } if (privateAssoc.verifySignature(signed, signature)) { _log.info("Consumer nonce signature verified."); return nonce; } else { _log.error("Consumer nonce signature failed."); return null; } } catch (AssociationException e) { _log.error("Error verifying consumer nonce signature.", e); return null; } } /** * Verifies the dicovery information matches the data received in a * authentication response from an OpenID Provider. * * @param authResp The authentication response to be verified. 
     * @param discovered    The discovery information obtained earlier during
     *                      the discovery stage, associated with the
     *                      identifier(s) in the request. Stateless operation
     *                      is assumed if null.
     * @return              The discovery information associated with the
     *                      claimed identifier, that can be used further in
     *                      the verification process. Null if the discovery
     *                      on the claimed identifier does not match the data
     *                      in the assertion.
     */
    private DiscoveryInformation verifyDiscovered(AuthSuccess authResp,
                                                  DiscoveryInformation discovered)
            throws DiscoveryException
    {
        if (authResp == null || authResp.getIdentity() == null)
        {
            _log.info("Assertion is not about an identifier");
            return null;
        }

        // dispatch on the protocol version of the assertion
        if (authResp.isVersion2())
            return verifyDiscovered2(authResp, discovered);
        else
            return verifyDiscovered1(authResp, discovered);
    }

    /**
     * Verifies the discovered information associated with a OpenID 1.x
     * response.
     *
     * @param authResp      The authentication response to be verified.
     * @param discovered    The discovery information obtained earlier during
     *                      the discovery stage, associated with the
     *                      identifier(s) in the request. Stateless operation
     *                      is assumed if null.
     * @return              The discovery information associated with the
     *                      claimed identifier, that can be used further in
     *                      the verification process. Null if the discovery
     *                      on the claimed identifier does not match the data
     *                      in the assertion.
     */
    private DiscoveryInformation verifyDiscovered1(AuthSuccess authResp,
                                                   DiscoveryInformation discovered)
            throws DiscoveryException
    {
        if ( authResp == null || authResp.isVersion2() ||
             authResp.getIdentity() == null )
        {
            if (DEBUG)
                _log.error("Invalid authentication response: " +
                           "cannot verify v1 discovered information");
            return null;
        }

        // asserted identifier in the AuthResponse
        String assertId = authResp.getIdentity();

        if ( discovered != null && ! discovered.isVersion2() &&
             discovered.getClaimedIdentifier() != null )
        {
            // statefull mode: we kept the discovery info from the request
            if (DEBUG)
                _log.debug("Verifying discovered information " +
                           "for OpenID1 assertion about ClaimedID: " +
                           discovered.getClaimedIdentifier().getIdentifier());

            String discoveredId = discovered.hasDelegateIdentifier() ?
                    discovered.getDelegateIdentifier() :
                    discovered.getClaimedIdentifier().getIdentifier();

            if (assertId.equals(discoveredId))
                return discovered;
        }

        // stateless, bare response, or the user changed the ID at the OP
        _log.info("Proceeding with stateless mode / bare response verification...");

        DiscoveryInformation firstServiceMatch = null;

        // assuming openid.identity is the claimedId
        // (delegation can't work with stateless/bare resp v1 operation)
        if (DEBUG)
            _log.debug("Performing discovery on the ClaimedID in the assertion: "
                       + assertId);
        List discoveries = _discovery.discover(assertId);

        Iterator iter = discoveries.iterator();
        while (iter.hasNext())
        {
            DiscoveryInformation service = (DiscoveryInformation) iter.next();

            if (service.isVersion2() || // only interested in v1
                ! service.hasClaimedIdentifier() || // need a claimedId
                service.hasDelegateIdentifier() || // not allowing delegates
                ! assertId.equals(service.getClaimedIdentifier().getIdentifier()))
                continue;

            if (DEBUG) _log.debug("Found matching service: " + service);

            // keep the first endpoint that matches
            if (firstServiceMatch == null)
                firstServiceMatch = service;

            Association assoc = _associations.load(
                    service.getOPEndpoint().toString(),
                    authResp.getHandle());

            // don't look further if there is an association with this endpoint
            if (assoc != null)
            {
                if (DEBUG)
                    _log.debug("Found existing association for " + service +
                               " Not looking for another service endpoint.");
                return service;
            }
        }

        if (firstServiceMatch == null)
            _log.error("No service element found to match " +
                       "the identifier in the assertion.");

        return firstServiceMatch;
    }

    /**
     * Verifies the discovered information associated with a OpenID 2.0
     * response.
* * @param authResp The authentication response to be verified. * @param discovered The discovery information obtained earlier during * the discovery stage, associated with the * identifier(s) in the request. Stateless operation * is assumed if null. * @return The discovery information associated with the * claimed identifier, that can be used further in * the verification process. Null if the discovery * on the claimed identifier does not match the data * in the assertion. */ private DiscoveryInformation verifyDiscovered2(AuthSuccess authResp, DiscoveryInformation discovered) throws DiscoveryException { if (authResp == null || ! authResp.isVersion2() || authResp.getIdentity() == null || authResp.getClaimed() == null) { if (DEBUG) _log.debug("Discovered information doesn't match " + "auth response / version"); return null; } // asserted identifier in the AuthResponse String assertId = authResp.getIdentity(); // claimed identifier in the AuthResponse Identifier respClaimed = _discovery.parseIdentifier(authResp.getClaimed(), true); // the OP endpoint sent in the response String respEndpoint = authResp.getOpEndpoint(); if (DEBUG) _log.debug("Verifying discovered information for OpenID2 assertion " + "about ClaimedID: " + respClaimed.getIdentifier()); // was the claimed identifier in the assertion previously discovered? if (discovered != null && discovered.hasClaimedIdentifier() && discovered.getClaimedIdentifier().equals(respClaimed) ) { // OP-endpoint, OP-specific ID and protocol version must match String opSpecific = discovered.hasDelegateIdentifier() ? 
discovered.getDelegateIdentifier() : discovered.getClaimedIdentifier().getIdentifier(); if ( opSpecific.equals(assertId) && discovered.isVersion2() && discovered.getOPEndpoint().toString().equals(respEndpoint)) { if (DEBUG) _log.debug( "ClaimedID in the assertion was previously discovered: " + respClaimed); return discovered; } } // stateless, bare response, or the user changed the ID at the OP DiscoveryInformation firstServiceMatch = null; // perform discovery on the claim identifier in the assertion if(DEBUG) _log.debug( "Performing discovery on the ClaimedID in the assertion: " + respClaimed); List discoveries = _discovery.discover(respClaimed); // find the newly discovered service endpoint that matches the assertion // - OP endpoint, OP-specific ID and protocol version must match // - prefer (first = highest priority) endpoint with an association if (DEBUG) _log.debug("Looking for a service element to match " + "the ClaimedID and OP endpoint in the assertion..."); Iterator iter = discoveries.iterator(); while (iter.hasNext()) { DiscoveryInformation service = (DiscoveryInformation) iter.next(); if (DiscoveryInformation.OPENID2_OP.equals(service.getVersion())) continue; String opSpecific = service.hasDelegateIdentifier() ? service.getDelegateIdentifier() : service.getClaimedIdentifier().getIdentifier(); if ( ! opSpecific.equals(assertId) || ! service.isVersion2() || ! 
service.getOPEndpoint().toString().equals(respEndpoint) ) continue; // keep the first endpoint that matches if (firstServiceMatch == null) { if (DEBUG) _log.debug("Found matching service: " + service); firstServiceMatch = service; } Association assoc = _associations.load( service.getOPEndpoint().toString(), authResp.getHandle()); // don't look further if there is an association with this endpoint if (assoc != null) { if (DEBUG) _log.debug("Found existing association, " + "not looking for another service endpoint."); return service; } } if (firstServiceMatch == null) _log.error("No service element found to match " + "the ClaimedID / OP-endpoint in the assertion."); return firstServiceMatch; } /** * Verifies the signature in a authentication response message. * * @param authResp Authentication response to be verified. * @param discovered The discovery information obtained earlier during * the discovery stage. * @return True if the verification succeeded, false otherwise. */ private VerificationResult verifySignature(AuthSuccess authResp, DiscoveryInformation discovered, VerificationResult result) throws AssociationException, MessageException, DiscoveryException { if (discovered == null || authResp == null) { _log.error("Can't verify signature: " + "null assertion or discovered information."); result.setStatusMsg("Can't verify signature: " + "null assertion or discovered information."); return result; } Identifier claimedId = discovered.isVersion2() ? 
_discovery.parseIdentifier(authResp.getClaimed()) : //may have frag discovered.getClaimedIdentifier(); //assert id may be delegate in v1 String handle = authResp.getHandle(); URL op = discovered.getOPEndpoint(); Association assoc = _associations.load(op.toString(), handle); if (assoc != null) // association available, local verification { _log.info("Found association: " + assoc.getHandle() + " verifying signature locally..."); String text = authResp.getSignedText(); String signature = authResp.getSignature(); if (assoc.verifySignature(text, signature)) { result.setVerifiedId(claimedId); if (DEBUG) _log.debug("Local signature verification succeeded."); } else if (DEBUG) { _log.debug("Local signature verification failed."); result.setStatusMsg("Local signature verification failed"); } } else // no association, verify with the OP { _log.info("No association found, " + "contacting the OP for direct verification..."); VerifyRequest vrfy = VerifyRequest.createVerifyRequest(authResp); ParameterList responseParams = new ParameterList(); int respCode = call(op.toString(), vrfy, responseParams); if (HttpStatus.SC_OK == respCode) { VerifyResponse vrfyResp = VerifyResponse.createVerifyResponse(responseParams); vrfyResp.validate(); if (vrfyResp.isSignatureVerified()) { // process the optional invalidate_handle first String invalidateHandle = vrfyResp.getInvalidateHandle(); if (invalidateHandle != null) _associations.remove(op.toString(), invalidateHandle); result.setVerifiedId(claimedId); if (DEBUG) _log.debug("Direct signature verification succeeded " + "with OP: " + op); } else { if (DEBUG) _log.debug("Direct signature verification failed " + "with OP: " + op); result.setStatusMsg("Direct signature verification failed."); } } else { DirectError err = DirectError.createDirectError(responseParams); if (DEBUG) _log.debug("Error verifying signature with the OP: " + op + " error message: " + err.keyValueFormEncoding()); result.setStatusMsg("Error verifying signature with the OP: " 
+ err.getErrorMsg()); } } Identifier verifiedID = result.getVerifiedId(); if (verifiedID != null) _log.info("Verification succeeded for: " + verifiedID); else _log.error("Verification failed for: " + authResp.getClaimed() + " reason: " + result.getStatusMsg()); return result; } /* visible for testing */ HttpFetcher getHttpFetcher() { return _httpFetcher; } }
apache-2.0
emergentone/10-dependencies
src/test/java/org/gradle/tests3/Test3_6.java
161
package org.gradle.tests3;

import org.junit.Test;

/**
 * Synthetic JUnit test used for Gradle build/test-performance experiments.
 * The only purpose of the test is to occupy a small, fixed amount of
 * wall-clock time during a test run.
 */
public class Test3_6 {
    /** Sleeps briefly to simulate a fast-running test. */
    @Test
    public void myTest() throws Exception {
        Thread.sleep(5);
    }
}
apache-2.0
sungsoo/optiq-project
avatica/src/main/java/net/hydromatic/avatica/ConnectStringParser.java
10927
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.hydromatic.avatica; import java.sql.SQLException; import java.util.Map; import java.util.Properties; /** * ConnectStringParser is a utility class that parses or creates a JDBC connect * string according to the OLE DB connect string syntax described at <a * href="http://msdn.microsoft.com/library/default.asp?url=/library/en-us/oledb/htm/oledbconnectionstringsyntax.asp"> * OLE DB Connection String Syntax</a>. * * <p>This code was adapted from Mondrian's mondrian.olap.Util class. * The primary differences between this and its Mondrian progenitor are: * * <ul> * <li>use of regular {@link Properties} for compatibility with the JDBC API * (replaces Mondrian's use of its own order-preserving and case-insensitive * PropertyList, found in Util.java at link above)</li> * <li>ability to pass to {@link #parse} a pre-existing Properties object into * which properties are to be parsed, possibly overriding prior values</li> * <li>use of {@link SQLException}s rather than unchecked {@link * RuntimeException}s</li> * <li>static members for parsing and creating connect strings</li> * </ul> * * <p>ConnectStringParser has a private constructor. 
Callers use the static
 * members:
 *
 * <dl>
 * <dt>{@link #parse(String)}
 * <dd>Parses the connect string into a new Properties object.
 *
 * <dt>{@link #parse(String, Properties)}
 * <dd>Parses the connect string into an existing Properties object.
 *
 * <dt>{@link #getParamString(Properties)}
 * <dd>Returns a param string, quoted and escaped as needed, to represent the
 * supplied name-value pairs.
 * </dl>
 */
public class ConnectStringParser {
  //~ Instance fields --------------------------------------------------------

  private final String s;   // the connect string being parsed
  private final int n;      // length of s
  private int i;            // current parse position within s

  // scratch buffers reused across name/value parses
  private final StringBuilder nameBuf = new StringBuilder();
  private final StringBuilder valueBuf = new StringBuilder();

  //~ Constructors -----------------------------------------------------------

  /**
   * Creates a new connect string parser.
   *
   * @param s connect string to parse
   *
   * @see #parse(String)
   * @see #parse(String, Properties)
   */
  private ConnectStringParser(String s) {
    this.s = s;
    this.i = 0;
    this.n = s.length();
  }

  //~ Methods ----------------------------------------------------------------

  /**
   * Parses the connect string into a new Properties object.
   *
   * @param s connect string to parse
   *
   * @return properties object with parsed params
   *
   * @throws SQLException error parsing name-value pairs
   */
  public static Properties parse(String s) throws SQLException {
    return new ConnectStringParser(s).parseInternal(null);
  }

  /**
   * Parses the connect string into an existing Properties object.
   *
   * @param s connect string to parse
   * @param props optional properties object, may be <code>null</code>
   *
   * @return properties object with parsed params; if an input <code>
   * props</code> was supplied, any duplicate properties will have been
   * replaced by those from the connect string.
   *
   * @throws SQLException error parsing name-value pairs
   */
  public static Properties parse(String s, Properties props)
      throws SQLException {
    return new ConnectStringParser(s).parseInternal(props);
  }

  /**
   * Parses the connect string into a Properties object. Note that the string
   * can only be parsed once. Subsequent calls return empty/unchanged
   * Properties.
   *
   * @param props optional properties object, may be <code>null</code>
   *
   * @return properties object with parsed params; if an input <code>
   * props</code> was supplied, any duplicate properties will have been
   * replaced by those from the connect string.
   *
   * @throws SQLException error parsing name-value pairs
   */
  Properties parseInternal(Properties props) throws SQLException {
    if (props == null) {
      props = new Properties();
    }
    // consume one "name=value" pair per iteration until input is exhausted
    while (i < n) {
      parsePair(props);
    }
    return props;
  }

  /**
   * Reads "name=value;" or "name=value&lt;EOF&gt;".
   *
   * @throws SQLException error parsing value
   */
  void parsePair(Properties props) throws SQLException {
    String name = parseName();
    String value;
    if (i >= n) {
      // "name=" at end of input: empty value
      value = "";
    } else if (s.charAt(i) == ';') {
      // "name=;": empty value, advance past the separator
      i++;
      value = "";
    } else {
      value = parseValue();
    }
    props.put(name, value);
  }

  /**
   * Reads "name=". Name can contain equals sign if equals sign is
   * doubled.
*/ String parseName() { nameBuf.setLength(0); while (true) { char c = s.charAt(i); switch (c) { case '=': i++; if ((i < n) && ((c = s.charAt(i)) == '=')) { // doubled equals sign; take one of them, and carry on i++; nameBuf.append(c); break; } String name = nameBuf.toString(); name = name.trim(); return name; case ' ': if (nameBuf.length() == 0) { // ignore preceding spaces i++; break; } // fall through default: nameBuf.append(c); i++; if (i >= n) { return nameBuf.toString().trim(); } } } } /** * Reads "value;" or "value<EOF>" * * @throws SQLException if find an unterminated quoted value */ String parseValue() throws SQLException { char c; // skip over leading white space while ((c = s.charAt(i)) == ' ') { i++; if (i >= n) { return ""; } } if ((c == '"') || (c == '\'')) { String value = parseQuoted(c); // skip over trailing white space while ((i < n) && ((c = s.charAt(i)) == ' ')) { i++; } if (i >= n) { return value; } else if (s.charAt(i) == ';') { i++; return value; } else { throw new SQLException( "quoted value ended too soon, at position " + i + " in '" + s + "'"); } } else { String value; int semi = s.indexOf(';', i); if (semi >= 0) { value = s.substring(i, semi); i = semi + 1; } else { value = s.substring(i); i = n; } return value.trim(); } } /** * Reads a string quoted by a given character. Occurrences of the quoting * character must be doubled. For example, <code>parseQuoted('"')</code> * reads <code>"a ""new"" string"</code> and returns <code>a "new" * string</code>. 
* * @throws SQLException if find an unterminated quoted value */ String parseQuoted(char q) throws SQLException { char c = s.charAt(i++); if (c != q) { throw new AssertionError("c != q: c=" + c + " q=" + q); } valueBuf.setLength(0); while (i < n) { c = s.charAt(i); if (c == q) { i++; if (i < n) { c = s.charAt(i); if (c == q) { valueBuf.append(c); i++; continue; } } return valueBuf.toString(); } else { valueBuf.append(c); i++; } } throw new SQLException( "Connect string '" + s + "' contains unterminated quoted value '" + valueBuf.toString() + "'"); } /** * Returns a param string, quoted and escaped as needed, to represent the * supplied name-value pairs. * * @param props name-value pairs * * @return param string, never <code>null</code> */ public static String getParamString(Properties props) { if (props == null) { return ""; } StringBuilder buf = new StringBuilder(); for (Map.Entry<String, String> entry : toMap(props).entrySet()) { final String name = entry.getKey(); final String value = entry.getValue(); String quote = ""; if (buf.length() > 0) { buf.append(';'); } // write parameter name if (name.startsWith(" ") || name.endsWith(" ")) { quote = "'"; buf.append(quote); } int len = name.length(); for (int i = 0; i < len; ++i) { char c = name.charAt(i); if (c == '=') { buf.append('='); } buf.append(c); } buf.append(quote); // might be empty quote = ""; buf.append('='); // write parameter value len = value.length(); boolean hasSemi = value.indexOf(';') >= 0; boolean hasSQ = value.indexOf("'") >= 0; boolean hasDQ = value.indexOf('"') >= 0; if (value.startsWith(" ") || value.endsWith(" ")) { quote = "'"; } else if (hasSemi || hasSQ || hasDQ) { // try to choose the least painful quote if (value.startsWith("\"")) { quote = "'"; } else if (value.startsWith("'")) { quote = "\""; } else { quote = hasSQ ? 
"\"" : "'"; } } char q; if (quote.length() > 0) { buf.append(quote); q = quote.charAt(0); } else { q = '\0'; } for (int i = 0; i < len; ++i) { char c = value.charAt(i); if (c == q) { buf.append(q); } buf.append(c); } buf.append(quote); // might be empty } return buf.toString(); } /** * Converts a {@link Properties} object to a <code>{@link Map}&lt;String, * String&gt;</code>. * * <p>This is necessary because {@link Properties} is a dinosaur class. It * ought to extend <code>Map&lt;String,String&gt;</code>, but instead * extends <code>{@link java.util.Hashtable}&lt;Object,Object&gt;</code>. * * <p>Typical usage, to iterate over a {@link Properties}: * * <blockquote> * <code> * Properties properties;<br> * for (Map.Entry&lt;String, String&gt; entry = * Util.toMap(properties).entrySet()) {<br> * println("key=" + entry.getKey() + ", value=" + entry.getValue());<br> * } * </code> * </blockquote> */ public static Map<String, String> toMap( final Properties properties) { return (Map) properties; } } // End ConnectStringParser.java
apache-2.0
hifive/hifive-pitalium
pitalium/src/main/java/com/htmlhifive/pitalium/junit/PtlBlockJUnit4ClassRunnerWithParametersFactory.java
1272
/*
 * Copyright (C) 2015-2017 NS Solutions Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.htmlhifive.pitalium.junit;

import org.junit.runner.Runner;
import org.junit.runners.model.InitializationError;
import org.junit.runners.parameterized.ParametersRunnerFactory;
import org.junit.runners.parameterized.TestWithParameters;

/**
 * A {@link ParametersRunnerFactory} that creates {@link PtlBlockJUnit4ClassRunnerWithParameters}
 * runners, one per set of test parameters.
 *
 * @author nakatani
 */
public class PtlBlockJUnit4ClassRunnerWithParametersFactory implements ParametersRunnerFactory {

	/**
	 * Creates the Pitalium-specific parameterized runner for the given test-with-parameters.
	 *
	 * @param test the test class together with one concrete set of parameters
	 * @return a runner that executes the test with those parameters
	 * @throws InitializationError if the runner cannot be constructed for the test class
	 */
	@Override
	public Runner createRunnerForTestWithParameters(TestWithParameters test) throws InitializationError {
		return new PtlBlockJUnit4ClassRunnerWithParameters(test);
	}
}
apache-2.0
EvilMcJerkface/crate
server/src/main/java/org/elasticsearch/common/logging/NodeAndClusterIdConverter.java
3212
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.logging; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.config.plugins.Plugin; import org.apache.logging.log4j.core.pattern.ConverterKeys; import org.apache.logging.log4j.core.pattern.LogEventPatternConverter; import org.apache.logging.log4j.core.pattern.PatternConverter; import org.apache.lucene.util.SetOnce; import java.util.Locale; /** * Pattern converter to format the node_and_cluster_id variable into JSON fields <code>node.id</code> and <code>cluster.uuid</code>. * Keeping those two fields together assures that they will be atomically set and become visible in logs at the same time. */ @Plugin(category = PatternConverter.CATEGORY, name = "NodeAndClusterIdConverter") @ConverterKeys({"node_and_cluster_id"}) public final class NodeAndClusterIdConverter extends LogEventPatternConverter { private static final SetOnce<String> NODE_AND_CLUSTER_ID = new SetOnce<>(); /** * Called by log4j2 to initialize this converter. 
*/ public static NodeAndClusterIdConverter newInstance(@SuppressWarnings("unused") final String[] options) { return new NodeAndClusterIdConverter(); } public NodeAndClusterIdConverter() { super("NodeAndClusterId", "node_and_cluster_id"); } /** * Updates only once the clusterID and nodeId. * Subsequent executions will throw {@link org.apache.lucene.util.SetOnce.AlreadySetException}. * * @param nodeId a nodeId received from cluster state update * @param clusterUUID a clusterId received from cluster state update */ public static void setNodeIdAndClusterId(String nodeId, String clusterUUID) { NODE_AND_CLUSTER_ID.set(formatIds(clusterUUID, nodeId)); } /** * Formats the node.id and cluster.uuid into json fields. * * @param event - a log event is ignored in this method as it uses the nodeId and clusterId to format */ @Override public void format(LogEvent event, StringBuilder toAppendTo) { if (NODE_AND_CLUSTER_ID.get() != null) { toAppendTo.append(NODE_AND_CLUSTER_ID.get()); } // nodeId/clusterUuid not received yet, not appending } private static String formatIds(String clusterUUID, String nodeId) { return String.format(Locale.ROOT, "\"cluster.uuid\": \"%s\", \"node.id\": \"%s\"", clusterUUID, nodeId); } }
apache-2.0
NextGenIntelligence/gerrit
gerrit-gwtui/src/main/java/com/google/gerrit/client/Dispatcher.java
31307
// Copyright (C) 2008 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.client; import static com.google.gerrit.common.PageLinks.ADMIN_CREATE_GROUP; import static com.google.gerrit.common.PageLinks.ADMIN_CREATE_PROJECT; import static com.google.gerrit.common.PageLinks.ADMIN_GROUPS; import static com.google.gerrit.common.PageLinks.ADMIN_PLUGINS; import static com.google.gerrit.common.PageLinks.ADMIN_PROJECTS; import static com.google.gerrit.common.PageLinks.DASHBOARDS; import static com.google.gerrit.common.PageLinks.MINE; import static com.google.gerrit.common.PageLinks.MY_GROUPS; import static com.google.gerrit.common.PageLinks.PROJECTS; import static com.google.gerrit.common.PageLinks.QUERY; import static com.google.gerrit.common.PageLinks.REGISTER; import static com.google.gerrit.common.PageLinks.SETTINGS; import static com.google.gerrit.common.PageLinks.SETTINGS_AGREEMENTS; import static com.google.gerrit.common.PageLinks.SETTINGS_CONTACT; import static com.google.gerrit.common.PageLinks.SETTINGS_HTTP_PASSWORD; import static com.google.gerrit.common.PageLinks.SETTINGS_MYGROUPS; import static com.google.gerrit.common.PageLinks.SETTINGS_NEW_AGREEMENT; import static com.google.gerrit.common.PageLinks.SETTINGS_PREFERENCES; import static com.google.gerrit.common.PageLinks.SETTINGS_PROJECTS; import static com.google.gerrit.common.PageLinks.SETTINGS_SSHKEYS; import static 
com.google.gerrit.common.PageLinks.SETTINGS_WEBIDENT; import static com.google.gerrit.common.PageLinks.op; import static com.google.gerrit.common.PageLinks.toChangeQuery; import com.google.gerrit.client.account.MyAgreementsScreen; import com.google.gerrit.client.account.MyContactInformationScreen; import com.google.gerrit.client.account.MyGroupsScreen; import com.google.gerrit.client.account.MyIdentitiesScreen; import com.google.gerrit.client.account.MyPasswordScreen; import com.google.gerrit.client.account.MyPreferencesScreen; import com.google.gerrit.client.account.MyProfileScreen; import com.google.gerrit.client.account.MySshKeysScreen; import com.google.gerrit.client.account.MyWatchedProjectsScreen; import com.google.gerrit.client.account.NewAgreementScreen; import com.google.gerrit.client.account.RegisterScreen; import com.google.gerrit.client.account.ValidateEmailScreen; import com.google.gerrit.client.admin.AccountGroupInfoScreen; import com.google.gerrit.client.admin.AccountGroupMembersScreen; import com.google.gerrit.client.admin.AccountGroupScreen; import com.google.gerrit.client.admin.CreateGroupScreen; import com.google.gerrit.client.admin.CreateProjectScreen; import com.google.gerrit.client.admin.GroupListScreen; import com.google.gerrit.client.admin.PluginListScreen; import com.google.gerrit.client.admin.ProjectAccessScreen; import com.google.gerrit.client.admin.ProjectBranchesScreen; import com.google.gerrit.client.admin.ProjectDashboardsScreen; import com.google.gerrit.client.admin.ProjectInfoScreen; import com.google.gerrit.client.admin.ProjectListScreen; import com.google.gerrit.client.admin.ProjectScreen; import com.google.gerrit.client.api.ExtensionScreen; import com.google.gerrit.client.change.ChangeScreen; import com.google.gerrit.client.change.FileTable; import com.google.gerrit.client.changes.AccountDashboardScreen; import com.google.gerrit.client.changes.CustomDashboardScreen; import com.google.gerrit.client.changes.ProjectDashboardScreen; 
import com.google.gerrit.client.changes.QueryScreen; import com.google.gerrit.client.dashboards.DashboardInfo; import com.google.gerrit.client.dashboards.DashboardList; import com.google.gerrit.client.diff.DisplaySide; import com.google.gerrit.client.diff.SideBySide; import com.google.gerrit.client.documentation.DocScreen; import com.google.gerrit.client.editor.EditScreen; import com.google.gerrit.client.groups.GroupApi; import com.google.gerrit.client.groups.GroupInfo; import com.google.gerrit.client.patches.UnifiedPatchScreen; import com.google.gerrit.client.rpc.GerritCallback; import com.google.gerrit.client.rpc.RestApi; import com.google.gerrit.client.ui.Screen; import com.google.gerrit.common.PageLinks; import com.google.gerrit.reviewdb.client.Account; import com.google.gerrit.reviewdb.client.AccountGeneralPreferences.DiffView; import com.google.gerrit.reviewdb.client.AccountGroup; import com.google.gerrit.reviewdb.client.Change; import com.google.gerrit.reviewdb.client.Patch; import com.google.gerrit.reviewdb.client.PatchSet; import com.google.gerrit.reviewdb.client.Project; import com.google.gwt.core.client.GWT; import com.google.gwt.core.client.RunAsyncCallback; import com.google.gwt.http.client.URL; import com.google.gwt.user.client.Window; import com.google.gwtorm.client.KeyUtil; public class Dispatcher { public static String toSideBySide(PatchSet.Id diffBase, Patch.Key id) { return toPatch("", diffBase, id); } public static String toSideBySide(PatchSet.Id diffBase, PatchSet.Id revision, String fileName) { return toPatch("", diffBase, revision, fileName, null, 0); } public static String toSideBySide(PatchSet.Id diffBase, PatchSet.Id revision, String fileName, DisplaySide side, int line) { return toPatch("", diffBase, revision, fileName, side, line); } public static String toUnified(PatchSet.Id diffBase, PatchSet.Id revision, String fileName) { return toPatch("unified", diffBase, revision, fileName, null, 0); } public static String toUnified(PatchSet.Id 
diffBase, Patch.Key id) { return toPatch("unified", diffBase, id); } public static String toPatch(String type, PatchSet.Id diffBase, Patch.Key id) { return toPatch(type, diffBase, id.getParentKey(), id.get(), null, 0); } public static String toEditScreen(PatchSet.Id revision, String fileName) { return toEditScreen(revision, fileName, 0); } public static String toEditScreen(PatchSet.Id revision, String fileName, int line) { return toPatch("edit", null, revision, fileName, null, line); } private static String toPatch(String type, PatchSet.Id diffBase, PatchSet.Id revision, String fileName, DisplaySide side, int line) { Change.Id c = revision.getParentKey(); StringBuilder p = new StringBuilder(); p.append("/c/").append(c).append("/"); if (diffBase != null) { p.append(diffBase.get()).append(".."); } p.append(revision.getId()).append("/").append(KeyUtil.encode(fileName)); if (type != null && !type.isEmpty() && (!"sidebyside".equals(type) || preferUnified())) { p.append(",").append(type); } if (side == DisplaySide.A && line > 0) { p.append("@a").append(line); } else if (line > 0) { p.append("@").append(line); } return p.toString(); } public static String toGroup(final AccountGroup.Id id) { return ADMIN_GROUPS + id.toString(); } public static String toGroup(AccountGroup.Id id, String panel) { return ADMIN_GROUPS + id.toString() + "," + panel; } public static String toGroup(AccountGroup.UUID uuid) { return PageLinks.toGroup(uuid); } public static String toGroup(AccountGroup.UUID uuid, String panel) { return toGroup(uuid) + "," + panel; } public static String toProject(Project.NameKey n) { return toProjectAdmin(n, ProjectScreen.getSavedPanel()); } public static String toProjectAdmin(Project.NameKey n, String panel) { if (panel == null || ProjectScreen.INFO.equals(panel)) { return ADMIN_PROJECTS + n.toString(); } return ADMIN_PROJECTS + n.toString() + "," + panel; } static final String RELOAD_UI = "/reload-ui/"; private static boolean wasStartedByReloadUI; void 
display(String token) { assert token != null; try { try { if (matchPrefix(RELOAD_UI, token)) { wasStartedByReloadUI = true; token = skip(token); } select(token); } finally { wasStartedByReloadUI = false; } } catch (RuntimeException err) { GWT.log("Error parsing history token: " + token, err); Gerrit.display(token, new NotFoundScreen()); } } private static void select(final String token) { if (matchPrefix(QUERY, token)) { query(token); } else if (matchPrefix("/Documentation/", token)) { docSearch(token); } else if (matchPrefix("/c/", token)) { change(token); } else if (matchPrefix("/x/", token)) { extension(token); } else if (matchExact(MINE, token)) { String defaultScreenToken = Gerrit.getDefaultScreenToken(); if (defaultScreenToken != null && !MINE.equals(defaultScreenToken)) { select(defaultScreenToken); } else { Gerrit.display(token, mine()); } } else if (matchPrefix("/dashboard/", token)) { dashboard(token); } else if (matchPrefix(PROJECTS, token)) { projects(token); } else if (matchExact(SETTINGS, token) || matchPrefix("/settings/", token) || matchExact(MY_GROUPS, token) || matchExact("register", token) || matchExact(REGISTER, token) || matchPrefix("/register/", token) || matchPrefix("/VE/", token) || matchPrefix("VE,", token) || matchPrefix("/SignInFailure,", token)) { settings(token); } else if (matchPrefix("/admin/", token)) { admin(token); } else if (/* DEPRECATED URL */matchPrefix("/c2/", token)) { change(token); } else if (/* LEGACY URL */matchPrefix("all,", token)) { redirectFromLegacyToken(token, legacyAll(token)); } else if (/* LEGACY URL */matchPrefix("mine,", token) || matchExact("mine", token)) { redirectFromLegacyToken(token, legacyMine(token)); } else if (/* LEGACY URL */matchPrefix("project,", token)) { redirectFromLegacyToken(token, legacyProject(token)); } else if (/* LEGACY URL */matchPrefix("change,", token)) { redirectFromLegacyToken(token, legacyChange(token)); } else if (/* LEGACY URL */matchPrefix("patch,", token)) { 
redirectFromLegacyToken(token, legacyPatch(token)); } else if (/* LEGACY URL */matchPrefix("admin,", token)) { redirectFromLegacyToken(token, legacyAdmin(token)); } else if (/* LEGACY URL */matchPrefix("settings,", token) || matchPrefix("register,", token) || matchPrefix("q,", token)) { redirectFromLegacyToken(token, legacySettings(token)); } else { Gerrit.display(token, new NotFoundScreen()); } } private static void redirectFromLegacyToken(String oldToken, String newToken) { if (newToken != null) { Window.Location.replace(Window.Location.getPath() + "#" + newToken); } else { Gerrit.display(oldToken, new NotFoundScreen()); } } private static String legacyMine(final String token) { if (matchExact("mine", token)) { return MINE; } if (matchExact("mine,starred", token)) { return toChangeQuery("is:starred"); } if (matchExact("mine,drafts", token)) { return toChangeQuery("owner:self is:draft"); } if (matchExact("mine,comments", token)) { return toChangeQuery("has:draft"); } if (matchPrefix("mine,watched,", token)) { return toChangeQuery("is:watched status:open"); } return null; } private static String legacyAll(final String token) { if (matchPrefix("all,abandoned,", token)) { return toChangeQuery("status:abandoned"); } if (matchPrefix("all,merged,", token)) { return toChangeQuery("status:merged"); } if (matchPrefix("all,open,", token)) { return toChangeQuery("status:open"); } return null; } private static String legacyProject(final String token) { if (matchPrefix("project,open,", token)) { final String s = skip(token); final int c = s.indexOf(','); Project.NameKey proj = Project.NameKey.parse(s.substring(0, c)); return toChangeQuery("status:open " + op("project", proj.get())); } if (matchPrefix("project,merged,", token)) { final String s = skip(token); final int c = s.indexOf(','); Project.NameKey proj = Project.NameKey.parse(s.substring(0, c)); return toChangeQuery("status:merged " + op("project", proj.get())); } if (matchPrefix("project,abandoned,", token)) { final 
String s = skip(token); final int c = s.indexOf(','); Project.NameKey proj = Project.NameKey.parse(s.substring(0, c)); return toChangeQuery("status:abandoned " + op("project", proj.get())); } return null; } private static String legacyChange(final String token) { final String s = skip(token); final String[] t = s.split(",", 2); if (t.length > 1 && matchPrefix("patchset=", t[1])) { return PageLinks.toChange(PatchSet.Id.parse(t[0] + "," + skip(t[1]))); } return PageLinks.toChange(Change.Id.parse(t[0])); } private static String legacyPatch(String token) { if (/* LEGACY URL */matchPrefix("patch,sidebyside,", token)) { return toPatch("", null, Patch.Key.parse(skip(token))); } if (/* LEGACY URL */matchPrefix("patch,unified,", token)) { return toPatch("unified", null, Patch.Key.parse(skip(token))); } return null; } private static String legacyAdmin(String token) { if (matchPrefix("admin,group,", token)) { return ADMIN_GROUPS + skip(token); } if (matchPrefix("admin,project,", token)) { String rest = skip(token); int c = rest.indexOf(','); String panel; Project.NameKey k; if (0 < c) { panel = rest.substring(c + 1); k = Project.NameKey.parse(rest.substring(0, c)); } else { panel = ProjectScreen.INFO; k = Project.NameKey.parse(rest); } return toProjectAdmin(k, panel); } return null; } private static String legacySettings(String token) { int c = token.indexOf(','); if (0 < c) { return "/" + token.substring(0, c) + "/" + token.substring(c + 1); } return null; } private static void query(String token) { String s = skip(token); int c = s.indexOf(','); Screen screen; if (c >= 0) { String prefix = s.substring(0, c); if (s.substring(c).equals(",n,z")) { // Respect legacy token with max sortkey. 
screen = new QueryScreen(prefix, 0); } else { screen = new QueryScreen(prefix, Integer.parseInt(s.substring(c + 1))); } } else { screen = new QueryScreen(s, 0); } Gerrit.display(token, screen); } private static Screen mine() { if (Gerrit.isSignedIn()) { return new AccountDashboardScreen(Gerrit.getUserAccount().getId()); } else { Screen r = new AccountDashboardScreen(null); r.setRequiresSignIn(true); return r; } } private static void dashboard(final String token) { String rest = skip(token); if (rest.matches("[0-9]+")) { Gerrit.display(token, new AccountDashboardScreen(Account.Id.parse(rest))); return; } if (rest.equals("self")) { if (Gerrit.isSignedIn()) { Gerrit.display(token, new AccountDashboardScreen(Gerrit.getUserAccount().getId())); } else { Screen s = new AccountDashboardScreen(null); s.setRequiresSignIn(true); Gerrit.display(token, s); } return; } if (rest.startsWith("?")) { Gerrit.display(token, new CustomDashboardScreen(rest.substring(1))); return; } Gerrit.display(token, new NotFoundScreen()); } private static void projects(final String token) { String rest = skip(token); int c = rest.indexOf(DASHBOARDS); if (0 <= c) { final String project = URL.decodePathSegment(rest.substring(0, c)); rest = rest.substring(c); if (matchPrefix(DASHBOARDS, rest)) { final String dashboardId = skip(rest); GerritCallback<DashboardInfo> cb = new GerritCallback<DashboardInfo>() { @Override public void onSuccess(DashboardInfo result) { if (matchPrefix("/dashboard/", result.url())) { String params = skip(result.url()).substring(1); ProjectDashboardScreen dash = new ProjectDashboardScreen( new Project.NameKey(project), params); Gerrit.display(token, dash); } } @Override public void onFailure(Throwable caught) { if ("default".equals(dashboardId) && RestApi.isNotFound(caught)) { Gerrit.display(toChangeQuery( PageLinks.projectQuery(new Project.NameKey(project)))); } else { super.onFailure(caught); } } }; if ("default".equals(dashboardId)) { DashboardList.getDefault(new 
Project.NameKey(project), cb); return; } c = dashboardId.indexOf(":"); if (0 <= c) { final String ref = URL.decodeQueryString(dashboardId.substring(0, c)); final String path = URL.decodeQueryString(dashboardId.substring(c + 1)); DashboardList.get(new Project.NameKey(project), ref + ":" + path, cb); return; } } } Gerrit.display(token, new NotFoundScreen()); } private static void change(final String token) { String rest = skip(token); int c = rest.lastIndexOf(','); String panel = null; if (0 <= c) { panel = rest.substring(c + 1); rest = rest.substring(0, c); int at = panel.lastIndexOf('@'); if (at > 0) { rest += panel.substring(at); panel = panel.substring(0, at); } } Change.Id id; int s = rest.indexOf('/'); if (0 <= s) { id = Change.Id.parse(rest.substring(0, s)); rest = rest.substring(s + 1); } else { id = Change.Id.parse(rest); rest = ""; } if (rest.isEmpty()) { FileTable.Mode mode = FileTable.Mode.REVIEW; if (panel != null && (panel.equals("edit") || panel.startsWith("edit/"))) { mode = FileTable.Mode.EDIT; panel = null; } Gerrit.display(token, panel == null ? 
new ChangeScreen(id, null, null, false, mode) : new NotFoundScreen()); return; } String psIdStr; s = rest.indexOf('/'); if (0 <= s) { psIdStr = rest.substring(0, s); rest = rest.substring(s + 1); } else { psIdStr = rest; rest = ""; } PatchSet.Id base = null; PatchSet.Id ps; int dotdot = psIdStr.indexOf(".."); if (1 <= dotdot) { base = new PatchSet.Id(id, Integer.parseInt(psIdStr.substring(0, dotdot))); psIdStr = psIdStr.substring(dotdot + 2); } ps = toPsId(id, psIdStr); if (!rest.isEmpty()) { DisplaySide side = DisplaySide.B; int line = 0; int at = rest.lastIndexOf('@'); if (at > 0) { String l = rest.substring(at+1); if (l.startsWith("a")) { side = DisplaySide.A; l = l.substring(1); } line = Integer.parseInt(l); rest = rest.substring(0, at); } Patch.Key p = new Patch.Key(ps, KeyUtil.decode(rest)); patch(token, base, p, side, line, panel); } else { if (panel == null) { Gerrit.display(token, new ChangeScreen(id, base != null ? String.valueOf(base.get()) : null, String.valueOf(ps.get()), false, FileTable.Mode.REVIEW)); } else { Gerrit.display(token, new NotFoundScreen()); } } } private static PatchSet.Id toPsId(Change.Id id, String psIdStr) { return new PatchSet.Id(id, psIdStr.equals("edit") ? 0 : Integer.parseInt(psIdStr)); } private static void extension(final String token) { ExtensionScreen view = new ExtensionScreen(skip(token)); if (view.isFound()) { Gerrit.display(token, view); } else { Gerrit.display(token, new NotFoundScreen()); } } private static void patch(String token, PatchSet.Id baseId, Patch.Key id, DisplaySide side, int line, String panelType) { String panel = panelType; if (panel == null) { int c = token.lastIndexOf(','); panel = 0 <= c ? 
token.substring(c + 1) : ""; } if ("".equals(panel) || /* DEPRECATED URL */"cm".equals(panel)) { if (preferUnified()) { unified(token, baseId, id); } else { codemirror(token, baseId, id, side, line, false); } } else if ("sidebyside".equals(panel)) { codemirror(token, null, id, side, line, false); } else if ("unified".equals(panel)) { unified(token, baseId, id); } else if ("edit".equals(panel)) { codemirror(token, null, id, side, line, true); } else { Gerrit.display(token, new NotFoundScreen()); } } private static boolean preferUnified() { return Gerrit.isSignedIn() && DiffView.UNIFIED_DIFF.equals(Gerrit.getUserAccount() .getGeneralPreferences() .getDiffView()); } private static void unified(final String token, final PatchSet.Id baseId, final Patch.Key id) { GWT.runAsync(new AsyncSplit(token) { @Override public void onSuccess() { UnifiedPatchScreen.TopView top = Gerrit.getPatchScreenTopView(); Gerrit.display(token, new UnifiedPatchScreen(id, top, baseId)); } }); } private static void codemirror(final String token, final PatchSet.Id baseId, final Patch.Key id, final DisplaySide side, final int line, final boolean edit) { GWT.runAsync(new AsyncSplit(token) { @Override public void onSuccess() { Gerrit.display(token, edit ? 
new EditScreen(baseId, id, line) : new SideBySide(baseId, id.getParentKey(), id.get(), side, line)); } }); } private static void settings(String token) { GWT.runAsync(new AsyncSplit(token) { @Override public void onSuccess() { Gerrit.display(token, select()); } private Screen select() { if (matchExact(SETTINGS, token)) { return new MyProfileScreen(); } if (matchExact(SETTINGS_PREFERENCES, token)) { return new MyPreferencesScreen(); } if (matchExact(SETTINGS_PROJECTS, token)) { return new MyWatchedProjectsScreen(); } if (matchExact(SETTINGS_CONTACT, token)) { return new MyContactInformationScreen(); } if (matchExact(SETTINGS_SSHKEYS, token)) { return new MySshKeysScreen(); } if (matchExact(SETTINGS_WEBIDENT, token)) { return new MyIdentitiesScreen(); } if (matchExact(SETTINGS_HTTP_PASSWORD, token)) { return new MyPasswordScreen(); } if (matchExact(MY_GROUPS, token) || matchExact(SETTINGS_MYGROUPS, token)) { return new MyGroupsScreen(); } if (matchExact(SETTINGS_AGREEMENTS, token) && Gerrit.info().auth().useContributorAgreements()) { return new MyAgreementsScreen(); } if (matchExact(REGISTER, token) || matchExact("/register/", token) || matchExact("register", token)) { return new RegisterScreen(MINE); } else if (matchPrefix("/register/", token)) { return new RegisterScreen("/" + skip(token)); } if (matchPrefix("/VE/", token) || matchPrefix("VE,", token)) { return new ValidateEmailScreen(skip(token)); } if (matchExact(SETTINGS_NEW_AGREEMENT, token)) { return new NewAgreementScreen(); } if (matchPrefix(SETTINGS_NEW_AGREEMENT + "/", token)) { return new NewAgreementScreen(skip(token)); } return new NotFoundScreen(); } }); } private static void admin(String token) { GWT.runAsync(new AsyncSplit(token) { @Override public void onSuccess() { if (matchExact(ADMIN_GROUPS, token) || matchExact("/admin/groups", token)) { Gerrit.display(token, new GroupListScreen()); } else if (matchPrefix(ADMIN_GROUPS, token)) { String rest = skip(token); if (rest.startsWith("?")) { 
Gerrit.display(token, new GroupListScreen(rest.substring(1))); } else { group(); } } else if (matchPrefix("/admin/groups", token)) { String rest = skip(token); if (rest.startsWith("?")) { Gerrit.display(token, new GroupListScreen(rest.substring(1))); } } else if (matchExact(ADMIN_PROJECTS, token) || matchExact("/admin/projects", token)) { Gerrit.display(token, new ProjectListScreen()); } else if (matchPrefix(ADMIN_PROJECTS, token)) { String rest = skip(token); if (rest.startsWith("?")) { Gerrit.display(token, new ProjectListScreen(rest.substring(1))); } else { Gerrit.display(token, selectProject()); } } else if (matchPrefix("/admin/projects", token)) { String rest = skip(token); if (rest.startsWith("?")) { Gerrit.display(token, new ProjectListScreen(rest.substring(1))); } } else if (matchPrefix(ADMIN_PLUGINS, token) || matchExact("/admin/plugins", token)) { Gerrit.display(token, new PluginListScreen()); } else if (matchExact(ADMIN_CREATE_PROJECT, token) || matchExact("/admin/create-project", token)) { Gerrit.display(token, new CreateProjectScreen()); } else if (matchExact(ADMIN_CREATE_GROUP, token) || matchExact("/admin/create-group", token)) { Gerrit.display(token, new CreateGroupScreen()); } else { Gerrit.display(token, new NotFoundScreen()); } } private void group() { final String panel; final String group; if (matchPrefix("/admin/groups/uuid-", token)) { String p = skip(token); int c = p.indexOf(','); if (c < 0) { group = p; panel = null; } else { group = p.substring(0, c); panel = p.substring(c + 1); } } else if (matchPrefix(ADMIN_GROUPS, token)) { String p = skip(token); int c = p.indexOf(','); if (c < 0) { group = p; panel = null; } else { group = p.substring(0, c); panel = p.substring(c + 1); } } else { Gerrit.display(token, new NotFoundScreen()); return; } GroupApi.getGroupDetail(group, new GerritCallback<GroupInfo>() { @Override public void onSuccess(GroupInfo group) { if (panel == null || panel.isEmpty()) { // The token does not say which group screen 
should be shown, // as default for internal groups show the members, as default // for external and system groups show the info screen (since // for external and system groups the members cannot be // shown in the web UI). // if (AccountGroup.isInternalGroup(group.getGroupUUID())) { Gerrit.display(toGroup(group.getGroupId(), AccountGroupScreen.MEMBERS), new AccountGroupMembersScreen(group, token)); } else { Gerrit.display(toGroup(group.getGroupId(), AccountGroupScreen.INFO), new AccountGroupInfoScreen(group, token)); } } else if (AccountGroupScreen.INFO.equals(panel)) { Gerrit.display(token, new AccountGroupInfoScreen(group, token)); } else if (AccountGroupScreen.MEMBERS.equals(panel)) { Gerrit.display(token, new AccountGroupMembersScreen(group, token)); } else { Gerrit.display(token, new NotFoundScreen()); } } }); } private Screen selectProject() { if (matchPrefix(ADMIN_PROJECTS, token)) { String rest = skip(token); int c = rest.lastIndexOf(','); if (c < 0) { return new ProjectInfoScreen(Project.NameKey.parse(rest)); } else if (c == 0) { return new NotFoundScreen(); } int q = rest.lastIndexOf('?'); if (q > 0 && rest.lastIndexOf(',', q) > 0) { c = rest.substring(0, q - 1).lastIndexOf(','); } Project.NameKey k = Project.NameKey.parse(rest.substring(0, c)); String panel = rest.substring(c + 1); if (ProjectScreen.INFO.equals(panel)) { return new ProjectInfoScreen(k); } if (ProjectScreen.BRANCH.equals(panel) || matchPrefix(ProjectScreen.BRANCH, panel)) { return new ProjectBranchesScreen(k); } if (ProjectScreen.ACCESS.equals(panel)) { return new ProjectAccessScreen(k); } if (ProjectScreen.DASHBOARDS.equals(panel)) { return new ProjectDashboardsScreen(k); } } return new NotFoundScreen(); } }); } private static boolean matchExact(String want, String token) { return token.equals(want); } private static int prefixlen; private static boolean matchPrefix(String want, String token) { if (token.startsWith(want)) { prefixlen = want.length(); return true; } else { return false; } 
  }

  /**
   * Returns the remainder of {@code token} after the prefix recorded by the
   * most recent successful {@code matchPrefix} call (via {@code prefixlen}).
   * Only valid immediately after a {@code matchPrefix} that returned true.
   */
  private static String skip(String token) {
    return token.substring(prefixlen);
  }

  /**
   * Base callback for GWT code-split loads. Remembers the token being
   * dispatched and whether the UI was started by a reload, so that a stale
   * code-split (404 after a server upgrade) can trigger a full page reload
   * instead of surfacing an error dialog.
   */
  private abstract static class AsyncSplit implements RunAsyncCallback {
    // True if this UI session itself was started by a forced reload;
    // prevents an endless reload loop when the split still cannot be fetched.
    private final boolean isReloadUi;
    protected final String token;

    protected AsyncSplit(String token) {
      this.isReloadUi = wasStartedByReloadUI;
      this.token = token;
    }

    @Override
    public final void onFailure(Throwable reason) {
      if (!isReloadUi
          && "HTTP download failed with status 404".equals(reason.getMessage())) {
        // The server was upgraded since we last download the main script,
        // so the pointers to the splits aren't valid anymore. Force the
        // page to reload itself and pick up the new code.
        //
        Gerrit.upgradeUI(token);
      } else {
        new ErrorDialog(reason).center();
      }
    }
  }

  /** Displays the documentation-search screen for the query embedded in {@code token}. */
  private static void docSearch(final String token) {
    GWT.runAsync(new AsyncSplit(token) {
      @Override
      public void onSuccess() {
        Gerrit.display(token, new DocScreen(skip(token)));
      }
    });
  }
}
apache-2.0
groupon/pebble
src/main/java/org/pebble/core/decoding/iterators/longs/BaseListIterator.java
6557
/**
 * Copyright 2015 Groupon
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.pebble.core.decoding.iterators.longs;

import it.unimi.dsi.fastutil.longs.LongIterator;
import org.pebble.core.PebbleBytesStore;
import org.pebble.core.decoding.InputBitStream;

import java.io.IOException;

/**
 * Base class for implementing Pebble's compressed list iterator. Merges, in
 * ascending order, the three sub-streams of a compressed list representation
 * (reference, intervals and deltas); each sub-iterator exposes -1L as its
 * current value once exhausted.
 */
abstract class BaseListIterator implements LongIterator {

    /**
     * Fixed number of bits used to represent value in list to be encoded. It can be any value between 1bit and
     * 63 bits.
     */
    protected final int valueBitSize;

    /**
     * Min interval size to be encoded as interval.
     */
    protected final int minIntervalSize;

    /**
     * Input bit stream used to read the compressed list representation.
     */
    protected final InputBitStream inputBitStream;

    /**
     * Mapping between list offsets and data bytes arrays and bytes offsets.
     */
    protected final PebbleBytesStore bytesStore;

    private final CompressionIterator referenceIt;
    private final CompressionIterator intervalIt;
    private final CompressionIterator deltaIt;

    /**
     * Initializes the iterators of each piece of the compressed representation of a list.
     * <ul>
     *     <li>Reference iterator.</li>
     *     <li>Intervals iterator.</li>
     *     <li>Delta iterator.</li>
     * </ul>
     * @param listIndex offset of the current list that is described in terms of reference.
     * @param valueBitSize fixed number of bits used to represent value in list to be encoded. It can be any value
     *                     between 1bit and 63 bits.
     * @param minIntervalSize min interval size to be encoded as interval.
     * @param inputBitStream input bit stream used to read the compressed list representation.
     * @param bytesStore mapping between list offsets and data bytes arrays and bytes offsets.
     * @throws java.io.IOException when there is an exception reading from <code>inputBitStream</code>
     */
    public BaseListIterator(
        final int listIndex,
        final int valueBitSize,
        final int minIntervalSize,
        final InputBitStream inputBitStream,
        final PebbleBytesStore bytesStore
    ) throws IOException {
        this.valueBitSize = valueBitSize;
        this.minIntervalSize = minIntervalSize;
        this.inputBitStream = inputBitStream;
        this.bytesStore = bytesStore;
        // The three sub-streams are laid out back to back; after building each
        // sub-iterator, skip past its payload so the next one starts in place.
        referenceIt = initializeReferenceIterator(listIndex, inputBitStream);
        inputBitStream.skipDeltas(referenceIt.remainingElements);
        intervalIt = new IntervalIterator(valueBitSize, minIntervalSize, inputBitStream);
        // Intervals encode two deltas (start, length) per element.
        inputBitStream.skipDeltas(intervalIt.remainingElements * 2);
        deltaIt = new DeltaIterator(valueBitSize, inputBitStream);
        inputBitStream.skipDeltas(deltaIt.remainingElements);
    }

    /**
     * Returns the next <code>long</code> in the iteration, taken as the smallest current value among the
     * reference, interval and delta sub-iterators. When there are no more elements returns -1.
     * @return the next <code>long</code> in the iteration
     * @throws IllegalStateException when reading from the underlying stream fails; the original
     *         {@link IOException} is preserved as the cause.
     */
    @Override
    public long nextLong() {
        try {
            if (
                referenceIt.currentValue != -1L &&
                (intervalIt.currentValue == -1L || referenceIt.currentValue < intervalIt.currentValue) &&
                (deltaIt.currentValue == -1L || referenceIt.currentValue < deltaIt.currentValue)
            ) {
                return referenceIt.next();
            }
            if (
                intervalIt.currentValue != -1L &&
                (deltaIt.currentValue == -1L || intervalIt.currentValue < deltaIt.currentValue)
            ) {
                return intervalIt.next();
            }
            return deltaIt.next();
        } catch (IOException exception) {
            // Preserve the cause; rethrowing only the message would lose the stack trace.
            throw new IllegalStateException(exception.getMessage(), exception);
        }
    }

    /**
     * checks whether the iteration has remaining elements or not.
     * @return true if there is remaining elements in the iteration and false whether not.
     */
    @Override
    public boolean hasNext() {
        return referenceIt.hasNext() || intervalIt.hasNext() || deltaIt.hasNext();
    }

    /**
     * Returns the next {@link Long} in the iteration. When there are no more elements returns null. This
     * method wraps into a {@link Long} the result from
     * {@link org.pebble.core.decoding.iterators.longs.BaseListIterator#nextLong()} method.
     * @return the next {@link Long} in the iteration, or null when exhausted.
     */
    @Override
    public Long next() {
        final long value = nextLong();
        return value == -1L ? null : value;
    }

    /**
     * This method skips <code>i</code> elements from current element on iteration.
     * @param i number of elements from current iteration position to be skipped.
     * @return the actual number of skipped elements. When the remaining elements in the iterator is smaller than
     *         <code>i</code> only the remaining elements will be skipped.
     */
    @Override
    public int skip(final int i) {
        int n = 0;
        while (hasNext() && n < i) {
            nextLong();
            n++;
        }
        return n;
    }

    /**
     * The iterator is from an immutable list, therefore elements can't be removed. If this method is invoked a
     * {@link UnsupportedOperationException} will be thrown.
     */
    @Override
    public void remove() {
        throw new UnsupportedOperationException("The list is immutable");
    }

    /**
     * Initialize reference iterator.
     * @param listIndex index of the current list that is described in terms of reference.
     * @param inputBitStream input bit stream used to read the compressed list representation.
     * @return initialized reference iterator
     * @throws java.io.IOException when the initialization of reference iterator raises the exception
     */
    protected abstract ReferenceIterator initializeReferenceIterator(
        final int listIndex,
        final InputBitStream inputBitStream
    ) throws IOException;

}
apache-2.0
ChengZhang1213/NativeDisplayImage
openCVLibrary310/src/main/java/org/opencv/features2d/Features2d.java
7884
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;

import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.Scalar;
import org.opencv.utils.Converters;

import java.util.ArrayList;
import java.util.List;

// NOTE(review): auto-generated JNI binding for OpenCV's features2d drawing
// functions. The public Java methods unwrap the Mat-based wrappers to native
// handles (nativeObj) and delegate to the private native stubs declared at
// the bottom of this class. Do not hand-edit logic here; regenerate instead.
public class Features2d {

    // Drawing flag bit mask, mirroring the C++ cv::DrawMatchesFlags values.
    public static final int
            DRAW_OVER_OUTIMG = 1,
            NOT_DRAW_SINGLE_POINTS = 2,
            DRAW_RICH_KEYPOINTS = 4;


    //
    // C++:  void drawKeypoints(Mat image, vector_KeyPoint keypoints, Mat outImage, Scalar color = Scalar::all(-1), int flags = 0)
    //

    // Draws keypoints on an image; delegates to the native drawKeypoints_0
    // stub, passing the Scalar color as its four channel components.
    //javadoc: drawKeypoints(image, keypoints, outImage, color, flags)
    public static void drawKeypoints(Mat image, MatOfKeyPoint keypoints, Mat outImage, Scalar color, int flags)
    {
        Mat keypoints_mat = keypoints;
        drawKeypoints_0(image.nativeObj, keypoints_mat.nativeObj, outImage.nativeObj, color.val[0], color.val[1], color.val[2], color.val[3], flags);

        return;
    }

    // Overload using the native defaults for color and flags
    // (C++ side: Scalar::all(-1), flags = 0).
    //javadoc: drawKeypoints(image, keypoints, outImage)
    public static void drawKeypoints(Mat image, MatOfKeyPoint keypoints, Mat outImage)
    {
        Mat keypoints_mat = keypoints;
        drawKeypoints_1(image.nativeObj, keypoints_mat.nativeObj, outImage.nativeObj);

        return;
    }


    //
    // C++:  void drawMatches(Mat img1, vector_KeyPoint keypoints1, Mat img2, vector_KeyPoint keypoints2, vector_DMatch matches1to2, Mat outImg, Scalar matchColor = Scalar::all(-1), Scalar singlePointColor = Scalar::all(-1), vector_char matchesMask = std::vector<char>(), int flags = 0)
    //

    // Draws matches between two keypoint sets (flat vector_DMatch variant);
    // both Scalar colors are flattened into their four channel components.
    //javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg, matchColor, singlePointColor, matchesMask, flags)
    public static void drawMatches(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, MatOfDMatch matches1to2, Mat outImg, Scalar matchColor, Scalar singlePointColor, MatOfByte matchesMask, int flags)
    {
        Mat keypoints1_mat = keypoints1;
        Mat keypoints2_mat = keypoints2;
        Mat matches1to2_mat = matches1to2;
        Mat matchesMask_mat = matchesMask;
        drawMatches_0(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj, matchColor.val[0], matchColor.val[1], matchColor.val[2], matchColor.val[3], singlePointColor.val[0], singlePointColor.val[1], singlePointColor.val[2], singlePointColor.val[3], matchesMask_mat.nativeObj, flags);

        return;
    }

    // Overload using the native defaults for colors, mask and flags.
    //javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg)
    public static void drawMatches(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, MatOfDMatch matches1to2, Mat outImg)
    {
        Mat keypoints1_mat = keypoints1;
        Mat keypoints2_mat = keypoints2;
        Mat matches1to2_mat = matches1to2;
        drawMatches_1(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj);

        return;
    }


    //
    // C++:  void drawMatches(Mat img1, vector_KeyPoint keypoints1, Mat img2, vector_KeyPoint keypoints2, vector_vector_DMatch matches1to2, Mat outImg, Scalar matchColor = Scalar::all(-1), Scalar singlePointColor = Scalar::all(-1), vector_vector_char matchesMask = std::vector<std::vector<char> >(), int flags = 0)
    //

    // Nested-vector variant (vector_vector_DMatch): the Java Lists are first
    // converted to single Mats via Converters before crossing the JNI boundary;
    // the *_tmplm lists only hold intermediate per-row Mats for the conversion.
    //javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg, matchColor, singlePointColor, matchesMask, flags)
    public static void drawMatches2(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, List<MatOfDMatch> matches1to2, Mat outImg, Scalar matchColor, Scalar singlePointColor, List<MatOfByte> matchesMask, int flags)
    {
        Mat keypoints1_mat = keypoints1;
        Mat keypoints2_mat = keypoints2;
        List<Mat> matches1to2_tmplm = new ArrayList<Mat>((matches1to2 != null) ? matches1to2.size() : 0);
        Mat matches1to2_mat = Converters.vector_vector_DMatch_to_Mat(matches1to2, matches1to2_tmplm);
        List<Mat> matchesMask_tmplm = new ArrayList<Mat>((matchesMask != null) ? matchesMask.size() : 0);
        Mat matchesMask_mat = Converters.vector_vector_char_to_Mat(matchesMask, matchesMask_tmplm);
        drawMatches2_0(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj, matchColor.val[0], matchColor.val[1], matchColor.val[2], matchColor.val[3], singlePointColor.val[0], singlePointColor.val[1], singlePointColor.val[2], singlePointColor.val[3], matchesMask_mat.nativeObj, flags);

        return;
    }

    // Overload of the nested-vector variant using the native defaults.
    //javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg)
    public static void drawMatches2(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, List<MatOfDMatch> matches1to2, Mat outImg)
    {
        Mat keypoints1_mat = keypoints1;
        Mat keypoints2_mat = keypoints2;
        List<Mat> matches1to2_tmplm = new ArrayList<Mat>((matches1to2 != null) ? matches1to2.size() : 0);
        Mat matches1to2_mat = Converters.vector_vector_DMatch_to_Mat(matches1to2, matches1to2_tmplm);
        drawMatches2_1(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj);

        return;
    }


    // Native JNI stubs; implemented in the OpenCV native library.

    // C++:  void drawKeypoints(Mat image, vector_KeyPoint keypoints, Mat outImage, Scalar color = Scalar::all(-1), int flags = 0)
    private static native void drawKeypoints_0(long image_nativeObj, long keypoints_mat_nativeObj, long outImage_nativeObj, double color_val0, double color_val1, double color_val2, double color_val3, int flags);
    private static native void drawKeypoints_1(long image_nativeObj, long keypoints_mat_nativeObj, long outImage_nativeObj);

    // C++:  void drawMatches(Mat img1, vector_KeyPoint keypoints1, Mat img2, vector_KeyPoint keypoints2, vector_DMatch matches1to2, Mat outImg, Scalar matchColor = Scalar::all(-1), Scalar singlePointColor = Scalar::all(-1), vector_char matchesMask = std::vector<char>(), int flags = 0)
    private static native void drawMatches_0(long img1_nativeObj, long keypoints1_mat_nativeObj, long img2_nativeObj, long keypoints2_mat_nativeObj, long matches1to2_mat_nativeObj, long outImg_nativeObj, double matchColor_val0, double matchColor_val1, double matchColor_val2, double matchColor_val3, double singlePointColor_val0, double singlePointColor_val1, double singlePointColor_val2, double singlePointColor_val3, long matchesMask_mat_nativeObj, int flags);
    private static native void drawMatches_1(long img1_nativeObj, long keypoints1_mat_nativeObj, long img2_nativeObj, long keypoints2_mat_nativeObj, long matches1to2_mat_nativeObj, long outImg_nativeObj);

    // C++:  void drawMatches(Mat img1, vector_KeyPoint keypoints1, Mat img2, vector_KeyPoint keypoints2, vector_vector_DMatch matches1to2, Mat outImg, Scalar matchColor = Scalar::all(-1), Scalar singlePointColor = Scalar::all(-1), vector_vector_char matchesMask = std::vector<std::vector<char> >(), int flags = 0)
    private static native void drawMatches2_0(long img1_nativeObj, long keypoints1_mat_nativeObj, long img2_nativeObj, long keypoints2_mat_nativeObj, long matches1to2_mat_nativeObj, long outImg_nativeObj, double matchColor_val0, double matchColor_val1, double matchColor_val2, double matchColor_val3, double singlePointColor_val0, double singlePointColor_val1, double singlePointColor_val2, double singlePointColor_val3, long matchesMask_mat_nativeObj, int flags);
    private static native void drawMatches2_1(long img1_nativeObj, long keypoints1_mat_nativeObj, long img2_nativeObj, long keypoints2_mat_nativeObj, long matches1to2_mat_nativeObj, long outImg_nativeObj);

}
apache-2.0
pfirmstone/river-internet
qa/src/org/apache/river/test/spec/txnmanager/TwoPhaseTest.java
3246
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * 
 *      http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.river.test.spec.txnmanager;

import java.util.logging.Level;
import org.apache.river.mahalo.*;
import net.jini.core.lease.*;
import net.jini.core.transaction.*;
import net.jini.core.transaction.server.*;
import java.io.*;
import java.rmi.*;

// Test harness specific classes
import org.apache.river.qa.harness.TestException;

// Shared classes
import org.apache.river.test.share.TxnManagerTest;
import org.apache.river.test.share.TxnTestUtils;
import org.apache.river.test.share.TestParticipant;
import org.apache.river.test.share.TestParticipantImpl;

/**
 * Exercises the two-phase commit protocol of the transaction manager with a
 * single test participant, in three phases:
 * <ol>
 *   <li>participant votes NOTCHANGED — commit must succeed;</li>
 *   <li>participant votes PREPARED — commit must succeed;</li>
 *   <li>participant votes ABORTED — commit must raise
 *       {@link CannotCommitException}.</li>
 * </ol>
 */
public class TwoPhaseTest extends TxnManagerTest {

    public void run() throws Exception {
        TransactionManager mgr = null;
        Transaction.Created cr = null;
        TestParticipant part = null;

        startTxnMgr();

        part = new TestParticipantImpl();

        // Phase 1: participant joins and votes NOTCHANGED; the transaction
        // is expected to commit cleanly.
        mgr = manager();

        if (DEBUG) {
            logger.log(Level.INFO, "TwoPhaseTest: run: mgr = " + mgr);
        }
        cr = TransactionFactory.create(mgr, Lease.FOREVER);
        part.setBehavior(OP_JOIN);
        part.setBehavior(OP_VOTE_NOTCHANGED);
        part.behave(cr.transaction);
        cr.transaction.commit();
        part.clearBehavior(OP_VOTE_NOTCHANGED);

        // Phase 2: a fresh transaction where the participant votes PREPARED;
        // commit is again expected to succeed.
        mgr = manager();

        if (DEBUG) {
            logger.log(Level.INFO, "TwoPhaseTest: run: mgr = " + mgr);
        }
        cr = TransactionFactory.create(mgr, Lease.FOREVER);
        part.setBehavior(OP_JOIN);
        part.setBehavior(OP_VOTE_PREPARED);
        part.behave(cr.transaction);
        cr.transaction.commit();
        part.clearBehavior(OP_VOTE_PREPARED);

        // Phase 3: the participant votes ABORTED, so commit must fail with
        // CannotCommitException; reaching the throw below means it did not.
        try {
            mgr = manager();

            if (DEBUG) {
                logger.log(Level.INFO, "TwoPhaseTest: run: mgr = " + mgr);
            }
            cr = TransactionFactory.create(mgr, Lease.FOREVER);
            part.setBehavior(OP_JOIN);
            part.setBehavior(OP_VOTE_ABORTED);
            part.behave(cr.transaction);
            cr.transaction.commit();
            throw new TestException("CannotCommitException is not raised");
        } catch (CannotCommitException cce) {
            // Expected exception. Test passed.
            // Best-effort cleanup: a RemoteException here must not mask the
            // successful outcome, so it is only logged.
            try {
                part.clearBehavior(OP_VOTE_ABORTED);
            } catch (RemoteException re1) {
                logger.log(Level.INFO, "TwoPhaseTest: run: " + re1.getMessage());
                re1.printStackTrace();
            }
        }
    }
}
apache-2.0
smee/gradoop
gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/subgraph/functions/SourceTargetIdGraphsTuple.java
1775
/* * Copyright © 2014 - 2018 Leipzig University (Database Research Group) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradoop.flink.model.impl.operators.subgraph.functions; import org.apache.flink.api.common.functions.FlatMapFunction; import org.apache.flink.api.java.functions.FunctionAnnotation; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.util.Collector; import org.gradoop.common.model.impl.pojo.Edge; import org.gradoop.common.model.impl.id.GradoopId; import org.gradoop.common.model.impl.id.GradoopIdSet; /** * For each edge, collect two tuple 2 containing its source or target id in the * first field and all the graphs this edge is contained in in its second field. * * @param <E> epgm edge type */ @FunctionAnnotation.ReadFields("sourceId;targetId") @FunctionAnnotation.ForwardedFields("graphIds->f1") public class SourceTargetIdGraphsTuple<E extends Edge> implements FlatMapFunction<E, Tuple2<GradoopId, GradoopIdSet>> { @Override public void flatMap( E e, Collector<Tuple2<GradoopId, GradoopIdSet>> collector) throws Exception { collector.collect(new Tuple2<>(e.getSourceId(), e.getGraphIds())); collector.collect(new Tuple2<>(e.getTargetId(), e.getGraphIds())); } }
apache-2.0
davidraleigh/geometry-api-java
src/main/java/com/esri/core/geometry/AttributeStreamOfInt32.java
18938
/* Copyright 1995-2018 Esri Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. For additional information, contact: Environmental Systems Research Institute, Inc. Attn: Contracts Dept 380 New York Street Redlands, California, USA 92373 email: contracts@esri.com */ package com.esri.core.geometry; import com.esri.core.geometry.VertexDescription.Persistence; import java.io.IOException; import java.io.ObjectStreamException; import java.io.Serializable; import java.nio.ByteBuffer; import java.nio.IntBuffer; import java.util.Arrays; import static com.esri.core.geometry.SizeOf.SIZE_OF_ATTRIBUTE_STREAM_OF_INT32; import static com.esri.core.geometry.SizeOf.sizeOfIntArray; final class AttributeStreamOfInt32 extends AttributeStreamBase implements Serializable { private static final long serialVersionUID = 1L; transient private int[] m_buffer = null; private int m_size; public void reserve(int reserve) { if (reserve <= 0) return; if (m_buffer == null) m_buffer = new int[reserve]; else { if (reserve <= m_buffer.length) return; int[] buf = new int[reserve]; System.arraycopy(m_buffer, 0, buf, 0, m_buffer.length); m_buffer = buf; } } public int size() { return m_size; } public int capacity() { return m_buffer != null ? 
m_buffer.length : 0; } public AttributeStreamOfInt32(int size) { int sz = size; if (sz < 2) sz = 2; m_buffer = new int[sz]; m_size = size; } public AttributeStreamOfInt32(int size, int defaultValue) { int sz = size; if (sz < 2) sz = 2; m_buffer = new int[sz]; m_size = size; Arrays.fill(m_buffer, 0, size, defaultValue); // for (int i = 0; i < size; i++) // m_buffer[i] = defaultValue; } public AttributeStreamOfInt32(AttributeStreamOfInt32 other) { m_buffer = other.m_buffer.clone(); m_size = other.m_size; } public AttributeStreamOfInt32(AttributeStreamOfInt32 other, int maxSize) { m_size = other.size(); if (m_size > maxSize) m_size = maxSize; int sz = m_size; if (sz < 2) sz = 2; m_buffer = new int[sz]; System.arraycopy(other.m_buffer, 0, m_buffer, 0, m_size); } /** * Reads a value from the buffer at given offset. * * @param offset * is the element number in the stream. */ public int read(int offset) { return m_buffer[offset]; } public int get(int offset) { return m_buffer[offset]; } /** * Overwrites given element with new value. * * @param offset * is the element number in the stream. * @param value * is the value to write. */ public void write(int offset, int value) { if (m_bReadonly) { throw new RuntimeException("invalid_call"); } m_buffer[offset] = value; } public void set(int offset, int value) { if (m_bReadonly) { throw new RuntimeException("invalid_call"); } m_buffer[offset] = value; } /** * Adds a new value at the end of the stream. * * @param offset * is the element number in the stream. * @param value * is the value to write. */ public void add(int v) { resize(m_size + 1); m_buffer[m_size - 1] = v; } @Override public AttributeStreamBase restrictedClone(int maxsize) { int len = m_size; int newSize = maxsize < len ? 
maxsize : len; int[] newBuffer = new int[newSize]; System.arraycopy(m_buffer, 0, newBuffer, 0, newSize); m_buffer = newBuffer; m_size = newSize; return this; } @Override public int virtualSize() { return size(); } @Override public long estimateMemorySize() { return SIZE_OF_ATTRIBUTE_STREAM_OF_INT32 + sizeOfIntArray(m_buffer.length); } @Override public int getPersistence() { return Persistence.enumInt32; } @Override public double readAsDbl(int offset) { return read(offset); } @Override public int readAsInt(int offset) { return (int) read(offset); } @Override public long readAsInt64(int offset) { return (long) read(offset); } @Override public void resize(int newSize) { if (m_bLockedInSize) throw new GeometryException( "invalid call. Attribute Stream is locked and cannot be resized."); if (newSize <= m_size) { if ((newSize * 5) / 4 < m_buffer.length) {// decrease when the 25% // margin is exceeded int[] newBuffer = new int[newSize]; System.arraycopy(m_buffer, 0, newBuffer, 0, newSize); m_buffer = newBuffer; } m_size = newSize; } else { if (newSize > m_buffer.length) { int sz = (newSize < 64) ? Math.max(newSize * 2, 4) : (newSize * 5) / 4; int[] newBuffer = new int[sz]; System.arraycopy(m_buffer, 0, newBuffer, 0, m_size); m_buffer = newBuffer; } m_size = newSize; } } @Override public void resizePreserveCapacity(int newSize)// java only method { if (m_buffer == null || newSize > m_buffer.length) resize(newSize); if (m_bLockedInSize) throw new GeometryException( "invalid call. Attribute Stream is locked and cannot be resized."); m_size = newSize; } @Override public void resize(int newSize, double defaultValue) { if (m_bLockedInSize) throw new GeometryException( "invalid call. 
Attribute Stream is locked and cannot be resized."); if (newSize <= m_size) { if ((newSize * 5) / 4 < m_buffer.length) {// decrease when the 25% // margin is exceeded int[] newBuffer = new int[newSize]; System.arraycopy(m_buffer, 0, newBuffer, 0, newSize); m_buffer = newBuffer; } m_size = newSize; } else { if (newSize > m_buffer.length) { int sz = (newSize < 64) ? Math.max(newSize * 2, 4) : (newSize * 5) / 4; int[] newBuffer = new int[sz]; System.arraycopy(m_buffer, 0, newBuffer, 0, m_size); m_buffer = newBuffer; } Arrays.fill(m_buffer, m_size, newSize, (int) defaultValue); // for (int i = m_size; i < newSize; i++) // m_buffer[i] = (int)defaultValue; m_size = newSize; } } @Override public void writeAsDbl(int offset, double d) { write(offset, (int) d); } @Override public void writeAsInt64(int offset, long d) { write(offset, (int) d); } @Override public void writeAsInt(int offset, int d) { write(offset, (int) d); } // @Override // public void writeRange(int srcStart, int count, ByteBuffer dst, // int dstOffsetBytes) { // // TODO Auto-generated method stub // // } @Override public int calculateHashImpl(int hashCode, int start, int end) { for (int i = start, n = size(); i < n && i < end; i++) hashCode = NumberUtils.hash(hashCode, read(i)); return hashCode; } @Override public boolean equals(AttributeStreamBase other, int start, int end) { if (other == null) return false; if (!(other instanceof AttributeStreamOfInt32)) return false; AttributeStreamOfInt32 _other = (AttributeStreamOfInt32) other; int size = size(); int sizeOther = _other.size(); if (end > size || end > sizeOther && (size != sizeOther)) return false; if (end > size) end = size; for (int i = start; i < end; i++) if (read(i) != _other.read(i)) return false; return true; } public int getLast() { return m_buffer[m_size - 1]; } public void setLast(int v) { m_buffer[m_size - 1] = v; } public void removeLast() { resize(m_size - 1); } // Finds element in the unsorted array and returns its index. 
Returns -1 if // the element could not be found. int findElement(int value) { for (int i = 0, n = m_size; i < n; i++) { if (m_buffer[i] == value) return i; } return -1; } // Returns True if element could be found in the array. boolean hasElement(int value) { return findElement(value) >= 0; } // Removes the element from the array in constant time. // It moves the last element of the array to the index and decrements the // array size by 1. void popElement(int index) { assert (index >= 0 && index < m_size); if (index < m_size - 1) { m_buffer[index] = m_buffer[m_size - 1]; } resize(m_size - 1); } @Override public void addRange(AttributeStreamBase src, int start, int count, boolean bForward, int stride) { if (m_bReadonly) throw new GeometryException("invalid_call"); if (!bForward && (stride < 1 || count % stride != 0)) throw new IllegalArgumentException(); int oldSize = m_size; int newSize = oldSize + count; resize(newSize); if (bForward) { System.arraycopy(((AttributeStreamOfInt32) src).m_buffer, start, m_buffer, oldSize, count); } else { int n = count; for (int i = 0; i < count; i += stride) { n -= stride; for (int s = 0; s < stride; s++) { m_buffer[oldSize + i + s] = ((AttributeStreamOfInt32) src).m_buffer[start + n + s]; } } } } @Override public void insertRange(int start, AttributeStreamBase src, int srcStart, int count, boolean bForward, int stride, int validSize) { if (m_bReadonly) throw new GeometryException("invalid_call"); if (!bForward && (stride < 1 || count % stride != 0)) throw new IllegalArgumentException(); System.arraycopy(m_buffer, start, m_buffer, start + count, validSize - start); if (m_buffer == ((AttributeStreamOfInt32) src).m_buffer) { if (start < srcStart) srcStart += count; } if (bForward) { System.arraycopy(((AttributeStreamOfInt32) src).m_buffer, srcStart, m_buffer, start, count); } else { int n = count; for (int i = 0; i < count; i += stride) { n -= stride; for (int s = 0; s < stride; s++) { m_buffer[start + i + s] = 
((AttributeStreamOfInt32) src).m_buffer[srcStart + n + s]; } } } } @Override public void insertRange(int start, double value, int count, int validSize) { if (m_bReadonly) throw new GeometryException("invalid_call"); System.arraycopy(m_buffer, start, m_buffer, start + count, validSize - start); int v = (int) value; Arrays.fill(m_buffer, start, start + count, v); // for (int i = 0; i < count; i++) // { // m_buffer[start + i] = v; // } } @Override public void insertAttributes(int start, Point pt, int semantics, int validSize) { if (m_bReadonly) throw new GeometryException("invalid_call"); int comp = VertexDescription.getComponentCount(semantics); System.arraycopy(m_buffer, start, m_buffer, start + comp, validSize - start); for (int c = 0; c < comp; c++) { m_buffer[start + c] = (int) pt.getAttributeAsDbl(semantics, c); } } @Override public void eraseRange(int index, int count, int validSize) { if (m_bReadonly) throw new GeometryException("invalid_call"); if (index + count > m_size) throw new GeometryException("invalid_call"); System.arraycopy(m_buffer, index + count, m_buffer, index, validSize - (index + count)); m_size -= count; } @Override public void readRange(int srcStart, int count, ByteBuffer dst, int dstOffset, boolean bForward) { if (srcStart < 0 || count < 0 || dstOffset < 0 || size() < count + srcStart) throw new IllegalArgumentException(); final int elmSize = NumberUtils.sizeOf((double) 0); if (dst.capacity() < (int) (dstOffset + elmSize * count)) throw new IllegalArgumentException(); if (count == 0) return; int j = srcStart; if (!bForward) j += count - 1; final int dj = bForward ? 
1 : -1; int offset = dstOffset; for (int i = 0; i < count; i++, offset += elmSize) { dst.putInt(offset, m_buffer[j]); j += dj; } } @Override public void reverseRange(int index, int count, int stride) { if (m_bReadonly) throw new GeometryException("invalid_call"); if (stride < 1 || count % stride != 0) throw new GeometryException("invalid_call"); int cIterations = count >> 1; int n = count; for (int i = 0; i < cIterations; i += stride) { n -= stride; for (int s = 0; s < stride; s++) { int temp = m_buffer[index + i + s]; m_buffer[index + i + s] = m_buffer[index + n + s]; m_buffer[index + n + s] = temp; } } } @Override public void setRange(double value, int start, int count) { if (start < 0 || count < 0 || start < 0 || count + start > size()) throw new IllegalArgumentException(); int v = (int) value; Arrays.fill(m_buffer, start, start + count, v); // for (int i = start, n = start + count; i < n; i++) // write(i, v); } @Override public void writeRange(int startElement, int count, AttributeStreamBase _src, int srcStart, boolean bForward, int stride) { if (startElement < 0 || count < 0 || srcStart < 0) throw new IllegalArgumentException(); if (!bForward && (stride <= 0 || (count % stride != 0))) throw new IllegalArgumentException(); AttributeStreamOfInt32 src = (AttributeStreamOfInt32) _src; // the input // type must // match if (src.size() < (int) (srcStart + count)) throw new IllegalArgumentException(); if (count == 0) return; if (size() < count + startElement) resize(count + startElement); if (_src == (AttributeStreamBase) this) { _selfWriteRangeImpl(startElement, count, srcStart, bForward, stride); return; } if (bForward) { System.arraycopy(src.m_buffer, srcStart, m_buffer, startElement, count); // int j = startElement; // int offset = srcStart; // for (int i = 0; i < count; i++) // { // m_buffer[j] = src.m_buffer[offset]; // j++; // offset++; // } } else { int j = startElement; int offset = srcStart + count - stride; if (stride == 1) { for (int i = 0; i < count; 
i++) { m_buffer[j] = src.m_buffer[offset]; j++; offset--; } } else { for (int i = 0, n = count / stride; i < n; i++) { for (int k = 0; k < stride; k++) m_buffer[j + k] = src.m_buffer[offset + k]; j += stride; offset -= stride; } } } } private void _selfWriteRangeImpl(int toElement, int count, int fromElement, boolean bForward, int stride) { // writing from to this stream. if (bForward) { if (toElement == fromElement) return; } System.arraycopy(m_buffer, fromElement, m_buffer, toElement, count); if (bForward) return; // reverse what we written int j = toElement; int offset = toElement + count - stride; for (int i = 0, n = count / 2; i < n; i++) { for (int k = 0; k < stride; k++) { int v = m_buffer[j + k]; m_buffer[j + k] = m_buffer[offset + k]; m_buffer[offset + k] = v; } j += stride; offset -= stride; } } @Override public void writeRange(int startElement, int count, ByteBuffer src, int offsetBytes, boolean bForward) { if (startElement < 0 || count < 0 || offsetBytes < 0) throw new IllegalArgumentException(); final int elmSize = NumberUtils.sizeOf((double) 0); if (src.capacity() < (int) (offsetBytes + elmSize * count)) throw new IllegalArgumentException(); if (count == 0) return; if (size() < count + startElement) resize(count + startElement); int j = startElement; if (!bForward) j += count - 1; final int dj = bForward ? 
1 : -1; int offset = offsetBytes; for (int i = 0; i < count; i++, offset += elmSize) { m_buffer[j] = src.getInt(offset); j += dj; } } static public abstract class IntComparator { public abstract int compare(int v1, int v2); }; static class RandomSeed { public int random; public RandomSeed() { random = 1973; } } public void Sort(int start, int end, IntComparator compare) { if (end - start < 10) insertionsort(start, end, compare); else { quicksort(start, end - 1, compare, new RandomSeed()); } } void insertionsort(int start, int end, IntComparator compare) { for (int j = start; j < end; j++)// insertion sort { int key = m_buffer[j]; int i = j - 1; while (i >= start && compare.compare(m_buffer[i], key) > 0) { m_buffer[i + 1] = m_buffer[i]; i--; } m_buffer[i + 1] = key; } } void swap(int left, int right) { int tmp = m_buffer[right]; m_buffer[right] = m_buffer[left]; m_buffer[left] = tmp; } void quicksort(int leftIn, int rightIn, IntComparator compare, RandomSeed seed) { if (leftIn >= rightIn) return; int left = leftIn; int right = rightIn; while (true)// tail recursion loop { if (right - left < 9) { insertionsort(left, right + 1, compare); return; } // Select random index for the pivot seed.random = NumberUtils.nextRand(seed.random); long nom = ((long) (right - left)) * seed.random; int pivotIndex = (int) (nom / NumberUtils.intMax()) + left; // Get the pivot value int pivotValue = m_buffer[pivotIndex]; // Start partition // Move pivot to the right swap(pivotIndex, right); int storeIndex = left; for (int i = left; i < right; i++) { int elm = m_buffer[i]; if (compare.compare(elm, pivotValue) <= 0) { swap(storeIndex, i); storeIndex = storeIndex + 1; } } // Move pivot to its final place swap(storeIndex, right); // End partition // Shorter part is regular recursion // Longer part is tail recursion if (storeIndex - left < right - storeIndex) { quicksort(left, storeIndex - 1, compare, seed); left = storeIndex + 1; } else { quicksort(storeIndex + 1, right, compare, seed); right 
= storeIndex - 1; } } } public void sort(int start, int end) { Arrays.sort(m_buffer, start, end); } private void writeObject(java.io.ObjectOutputStream stream) throws IOException { stream.defaultWriteObject(); IntBuffer intBuf = null; byte[] bytes = null; for (int i = 0; i < m_size;) { int n = Math.min(32, m_size - i); if (bytes == null) { bytes = new byte[n * 4]; //32 elements at a time ByteBuffer buf = ByteBuffer.wrap(bytes); intBuf = buf.asIntBuffer(); } intBuf.rewind(); intBuf.put(m_buffer, i, n); stream.write(bytes, 0, n * 4); i += n; } } private void readObject(java.io.ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); m_buffer = new int[m_size]; IntBuffer intBuf = null; byte[] bytes = null; for (int i = 0; i < m_size;) { int n = Math.min(32, m_size - i); if (bytes == null) { bytes = new byte[n * 4]; //32 elements at a time ByteBuffer buf = ByteBuffer.wrap(bytes); intBuf = buf.asIntBuffer(); } stream.read(bytes, 0, n * 4); intBuf.rewind(); intBuf.get(m_buffer, i, n); i += n; } } @SuppressWarnings("unused") private void readObjectNoData() throws ObjectStreamException { m_buffer = new int[2]; m_size = 0; } }
apache-2.0
duncangrant/clocker
docker/src/main/java/brooklyn/location/docker/strategy/affinity/AffinityRules.java
9557
/* * Copyright 2014 by Cloudsoft Corporation Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package brooklyn.location.docker.strategy.affinity; import java.util.List; import java.util.Locale; import java.util.Queue; import javax.annotation.Nullable; import brooklyn.config.ConfigKey; import brooklyn.entity.Entity; import brooklyn.entity.basic.ConfigKeys; import brooklyn.entity.basic.EntityPredicates; import brooklyn.location.docker.DockerHostLocation; import brooklyn.util.javalang.Reflections; import com.google.common.base.CharMatcher; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.base.Splitter; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Queues; import com.google.common.reflect.TypeToken; /** * Affinity rules for Docker hosts. * <p> * Rules are specified as strings, formatted as follows: * <ul> * <li>(<code>NOT</code>) <code>TYPE</code> <em>entityType</em>? * <li>(<code>NOT</code>) <code>NAME</code> <em>entityName</em> * <li>(<code>NOT</code>) <code>ID</code> <em>entityId</em>? * <li>(<code>NOT</code>) <code>APPLICATION</code> <em>applicationId</em>? 
* <li>(<code>NOT</code>) <code>PREDICATE</code> <em>entityPredicateClass</em> * <li>(<code>NOT</code>) <code>EMPTY</code> * </ul> * The <code>SAME</code> token is the default behaviour, and means the entities must have the property defined in the rule, <code>NOT</code> * means they mustn't have the property. The parameter given specifies the type or id, and if it's missing thee rule will apply to the * properties of the entity being placed. Rules that take a class name will instantiate an instance of that class from the current * classpath, so ensure the appropriate Jar files are available. The <code>EMPTY</code> rule will treaty empty locations as allowable, * otherwise a new {@link DockerHostLocation} will be created for the container. * <p> * To specify a rule that there must be no entities of the same type, an entity of type SolrServer, all in the same application, * use these rules: * <pre> * NOT TYPE * TYPE brooklyn.entity.nosql.solr.SolrServer * SAME APPLICATION * </pre> * <p> * Specify the rules during configuration using the {@link #AFFINITY_RULES} key: * <pre> * - serviceType: brooklyn.entity.webapp.tomcat.TomcatServer * brooklyn.config: * affinity.rules: * - "NOT TYPE" * - "TYPE brooklyn.entity.nosql.solr.SolrServer" * - "APPLICATION" * - $brooklyn:formatString("NOT ID %s", $brooklyn:entity("name")) * </pre> */ public class AffinityRules implements Predicate<Entity> { public static final ConfigKey<List<String>> AFFINITY_RULES = ConfigKeys.newConfigKey( new TypeToken<List<String>>() { }, "affinity.rules", "Affinity rules for entity placemnent"); public static final String NOT = "NOT"; public static final String TYPE = "TYPE"; public static final String NAME = "NAME"; public static final String ID = "ID"; public static final String APPLICATION = "APPLICATION"; public static final String PREDICATE = "PREDICATE"; public static final String EMPTY = "EMPTY"; public static final Iterable<String> VERBS = ImmutableList.of(TYPE, NAME, ID, APPLICATION, PREDICATE, 
EMPTY); private Predicate<Entity> affinityRules = Predicates.alwaysTrue(); private boolean allowEmpty = true; private final Entity entity; private AffinityRules(Entity entity) { this.entity = entity; } public static AffinityRules rulesFor(Entity entity) { return new AffinityRules(entity); } public AffinityRules parse(String...rules) { return parse(ImmutableList.copyOf(rules)); } public AffinityRules parse(String rules) { return parse(Splitter.on(CharMatcher.anyOf("\n,")).omitEmptyStrings().split(rules)); } public AffinityRules parse(Iterable<String> rules) { List<Predicate<Entity>> predicates = Lists.newArrayList(); for (String rule : rules) { Predicate<Entity> predicate = predicate(rule); predicates.add(predicate); } affinityRules = Predicates.and(predicates); return this; } private Predicate<Entity> predicate(String rule) { Preconditions.checkNotNull(rule, "rule"); Queue<String> tokens = Queues.newArrayDeque(Splitter.on(CharMatcher.WHITESPACE) .omitEmptyStrings() .splitToList(rule)); boolean same = true; Predicate<Entity> predicate = Predicates.alwaysTrue(); // Check first token for special values String first = tokens.peek(); if (first.equalsIgnoreCase(NOT)) { same = false; tokens.remove(); } // Check verb String verb = tokens.peek(); if (verb == null) { throw new IllegalStateException("Affinity rule verb not specified: " + rule); } else { if (Iterables.contains(VERBS, verb.toUpperCase(Locale.ENGLISH))) { tokens.remove(); } else { throw new IllegalStateException("Affinity rule parser found unexpected verb token: " + verb); } } // Check paramater and instantiate if required final String parameter = tokens.peek(); if (parameter == null) { if (verb.equalsIgnoreCase(EMPTY)) { allowEmpty = same; tokens.remove(); if (tokens.isEmpty()) { return predicate; } else { throw new IllegalStateException("Affinity rule has extra tokens: " + rule); } } else if (verb.equalsIgnoreCase(TYPE)) { predicate = new Predicate<Entity>() { @Override public boolean apply(@Nullable Entity 
input) { return input.getEntityType().getName().equalsIgnoreCase(entity.getEntityType().getName()) || input.getEntityType().getSimpleName().equalsIgnoreCase(entity.getEntityType().getSimpleName()); } }; } else if (verb.equalsIgnoreCase(ID)) { predicate = EntityPredicates.idEqualTo(entity.getId()); } else if (verb.equalsIgnoreCase(APPLICATION)) { predicate = EntityPredicates.applicationIdEqualTo(entity.getApplicationId()); } else { throw new IllegalStateException("Affinity rule parameter not specified: " + rule); } } else { tokens.remove(); if (verb.equalsIgnoreCase(TYPE)) { predicate = new Predicate<Entity>() { @Override public boolean apply(@Nullable Entity input) { return input.getEntityType().getName().equalsIgnoreCase(parameter) || input.getEntityType().getSimpleName().equalsIgnoreCase(parameter); } }; } else if (verb.equalsIgnoreCase(NAME)) { predicate = new Predicate<Entity>() { @Override public boolean apply(@Nullable Entity input) { return input.getDisplayName().toLowerCase(Locale.ENGLISH).contains(parameter.toLowerCase(Locale.ENGLISH)); } }; } else if (verb.equalsIgnoreCase(ID)) { predicate = EntityPredicates.idEqualTo(parameter); } else if (verb.equalsIgnoreCase(APPLICATION)) { predicate = EntityPredicates.applicationIdEqualTo(parameter); } else if (verb.equalsIgnoreCase(PREDICATE)) { try { Class<?> clazz = Class.forName(parameter); if (Reflections.hasNoArgConstructor(clazz)) { predicate = (Predicate<Entity>) Reflections.invokeConstructorWithArgs(clazz); } else { throw new IllegalStateException("Could not instantiate predicate: " + parameter); } } catch (ClassNotFoundException e) { throw new IllegalStateException("Could not find predicate: " + parameter); } } } // Check for left-over tokens if (tokens.peek() != null) { throw new IllegalStateException("Affinity rule has extra tokens: " + rule); } // Create predicate and return if (same) { return predicate; } else { return Predicates.not(predicate); } } @Override public boolean apply(@Nullable Entity input) 
{ return affinityRules.apply(input); } public boolean allowEmptyLocations() { return allowEmpty; } }
apache-2.0
tsib0/AWReporting
src/main/java/com/google/api/ads/adwords/awreporting/processors/onfile/ReportProcessorOnFile.java
13642
// Copyright 2012 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.adwords.awreporting.processors.onfile; import com.google.api.ads.adwords.awreporting.downloader.MultipleClientReportDownloader; import com.google.api.ads.adwords.awreporting.model.csv.AnnotationBasedMappingStrategy; import com.google.api.ads.adwords.awreporting.model.entities.Report; import com.google.api.ads.adwords.awreporting.model.util.ModifiedCsvToBean; import com.google.api.ads.adwords.awreporting.processors.ReportProcessor; import com.google.api.ads.adwords.awreporting.util.AdWordsSessionBuilderSynchronizer; import com.google.api.ads.adwords.lib.jaxb.v201502.ReportDefinition; import com.google.api.ads.adwords.lib.jaxb.v201502.ReportDefinitionDateRangeType; import com.google.api.ads.adwords.lib.jaxb.v201502.ReportDefinitionReportType; import com.google.api.ads.common.lib.exception.ValidationException; import com.google.common.base.Stopwatch; import com.google.common.collect.Lists; import org.apache.log4j.Logger; import org.joda.time.DateTime; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Component; import au.com.bytecode.opencsv.bean.MappingStrategy; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.text.DateFormat; import 
java.text.SimpleDateFormat; import java.util.Collection; import java.util.List; import java.util.Properties; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; /** * Main reporting processor responsible for downloading and saving the files to the file system. The * persistence of the parsed beans is delegated to the configured persister. * * @author jtoledo@google.com (Julian Toledo) * @author gustavomoreira@google.com (Gustavo Moreira) */ @Component @Qualifier("reportProcessorOnFile") public class ReportProcessorOnFile extends ReportProcessor { private static final Logger LOGGER = Logger.getLogger(ReportProcessorOnFile.class); private static final DateFormat TIMESTAMPFORMAT = new SimpleDateFormat("yyyy-MM-dd-HH_mm"); private MultipleClientReportDownloader multipleClientReportDownloader; /** * Constructor. * * @param reportRowsSetSize the size of the set parsed before send to the DB * @param numberOfReportProcessors the number of numberOfReportProcessors threads to be run */ @Autowired public ReportProcessorOnFile( @Value(value = "${aw.report.processor.rows.size:}") Integer reportRowsSetSize, @Value(value = "${aw.report.processor.threads:}") Integer numberOfReportProcessors) { if (reportRowsSetSize != null && reportRowsSetSize > 0) { this.reportRowsSetSize = reportRowsSetSize; } if (numberOfReportProcessors != null && numberOfReportProcessors > 0) { this.numberOfReportProcessors = numberOfReportProcessors; } } private <R extends Report> void processFiles(String mccAccountId, Class<R> reportBeanClass, Collection<File> localFiles, ReportDefinitionDateRangeType dateRangeType, String dateStart, String dateEnd) { final CountDownLatch latch = new CountDownLatch(localFiles.size()); ExecutorService executorService = Executors.newFixedThreadPool(numberOfReportProcessors); // Processing Report Local Files LOGGER.info(" Processing 
reports..."); Stopwatch stopwatch = Stopwatch.createStarted(); for (File file : localFiles) { LOGGER.trace("."); try { // We need to create a csvToBean and mappingStrategy for each thread ModifiedCsvToBean<R> csvToBean = new ModifiedCsvToBean<R>(); MappingStrategy<R> mappingStrategy = new AnnotationBasedMappingStrategy<R>(reportBeanClass); LOGGER.debug("Parsing file: " + file.getAbsolutePath()); RunnableProcessorOnFile<R> runnableProcesor = new RunnableProcessorOnFile<R>(file, csvToBean, mappingStrategy, dateRangeType, dateStart, dateEnd, mccAccountId, persister, reportRowsSetSize); runnableProcesor.setLatch(latch); executorService.execute(runnableProcesor); } catch (Exception e) { LOGGER.error("Ignoring file (Error when processing): " + file.getAbsolutePath()); e.printStackTrace(); } } try { latch.await(); } catch (InterruptedException e) { LOGGER.error(e.getMessage()); e.printStackTrace(); } executorService.shutdown(); stopwatch.stop(); LOGGER.info("*** Finished processing all reports in " + (stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000) + " seconds ***\n"); } /** * Caches the accounts into a temporary file. * * @param accountIdsSet the set with all the accounts */ @Override protected void cacheAccounts(Set<Long> accountIdsSet) { DateTime now = new DateTime(); String nowFormat = TIMESTAMPFORMAT.format(now.toDate()); try { File tempFile = File.createTempFile(nowFormat + "-accounts-ids", ".txt"); LOGGER.info("Cache file created for accounts: " + tempFile.getAbsolutePath()); FileWriter writer = new FileWriter(tempFile); for (Long accountId : accountIdsSet) { writer.write(Long.toString(accountId) + "\n"); } writer.close(); LOGGER.info("All account IDs added to cache file."); } catch (IOException e) { LOGGER.error("Could not create temporary file with the accounts. Accounts won't be cached."); e.printStackTrace(); } } /** * Generate all the mapped reports to the given account IDs. * * @param dateRangeType the date range type. * @param dateStart the starting date. 
* @param dateEnd the ending date. * @param accountIdsSet the account IDs. * @param properties the properties file * @throws Exception error reaching the API. */ @Override public void generateReportsForMCC(String mccAccountId, ReportDefinitionDateRangeType dateRangeType, String dateStart, String dateEnd, Set<Long> accountIdsSet, Properties properties, ReportDefinitionReportType onDemandReportType, List<String> reportFieldsToInclude) throws Exception { LOGGER.info("*** Retrieving account IDs ***"); if (accountIdsSet == null || accountIdsSet.size() == 0) { accountIdsSet = this.retrieveAccountIds(mccAccountId); } else { LOGGER.info("Accounts loaded from file."); } AdWordsSessionBuilderSynchronizer sessionBuilder = new AdWordsSessionBuilderSynchronizer( authenticator.authenticate(mccAccountId, false)); LOGGER.info("*** Generating Reports for " + accountIdsSet.size() + " accounts ***"); Stopwatch stopwatch = Stopwatch.createStarted(); Set<ReportDefinitionReportType> reports = this.csvReportEntitiesMapping.getDefinedReports(); // reports Set<Object> propertiesKeys = properties.keySet(); for (Object key : propertiesKeys) { String reportDefinitionKey = key.toString(); ReportDefinitionReportType reportType = this.extractReportTypeFromKey(reportDefinitionKey); if (reportType != null && reports.contains(reportType)) { this.downloadAndProcess(mccAccountId, sessionBuilder, reportType, dateRangeType, dateStart, dateEnd, accountIdsSet, reportDefinitionKey, properties); } } this.multipleClientReportDownloader.finalizeExecutorService(); stopwatch.stop(); LOGGER.info("*** Finished processing all reports in " + (stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000) + " seconds ***\n"); } /** * Downloads all the files from the API and process all the rows, saving the data to the * configured data base. * * @param sessionBuilder the session builder. * @param reportType the report type. * @param dateRangeType the date range type. * @param dateStart the start date. 
* @param dateEnd the ending date. * @param acountIdList the account IDs. * @param properties the properties resource. */ private <R extends Report> void downloadAndProcess(String mccAccountId, AdWordsSessionBuilderSynchronizer sessionBuilder, ReportDefinitionReportType reportType, ReportDefinitionDateRangeType dateRangeType, String dateStart, String dateEnd, Set<Long> acountIdList, String reportDefinitionKey, Properties properties) { // Download Reports to local files and Generate Report objects LOGGER.info("\n\n ** Generating: " + reportType.name() + " **"); LOGGER.info(" Downloading reports..."); Collection<File> localFiles = Lists.newArrayList(); try { ReportDefinition reportDefinition = getReportDefinition(reportType, dateRangeType, dateStart, dateEnd, reportDefinitionKey, properties); localFiles = this.multipleClientReportDownloader.downloadReports(sessionBuilder, reportDefinition, acountIdList); } catch (InterruptedException e) { LOGGER.error(e.getMessage()); e.printStackTrace(); return; } catch (ValidationException e) { LOGGER.error(e.getMessage()); e.printStackTrace(); return; } this.processLocalFiles(mccAccountId, reportType, localFiles, dateStart, dateEnd, dateRangeType); this.deleteTemporaryFiles(localFiles, reportType); } /** * Process the local files delegating the call to the concrete implementation. * * @param reportType the report type. * @param localFiles the local files. * @param dateStart the start date. * @param dateEnd the end date. * @param dateRangeType the date range type. 
*/ private <R extends Report> void processLocalFiles(String mccAccountId, ReportDefinitionReportType reportType, Collection<File> localFiles, String dateStart, String dateEnd, ReportDefinitionDateRangeType dateRangeType) { Stopwatch stopwatch = Stopwatch.createStarted(); @SuppressWarnings("unchecked") Class<R> reportBeanClass = (Class<R>) this.csvReportEntitiesMapping.getReportBeanClass(reportType); this.processFiles(mccAccountId, reportBeanClass, localFiles, dateRangeType, dateStart, dateEnd); stopwatch.stop(); LOGGER.info("\n* DB Process finished in " + (stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000) + " seconds ***"); } /** * Process the local files delegating the call to the concrete implementation. * * @param reportTypeName the report type name as String. * @param localFiles the local files. * @param dateStart the start date. * @param dateEnd the end date. * @param dateRangeType the date range type. */ @SuppressWarnings("unchecked") public <R extends Report> void processLocalFiles(String mccAccountId, String reportTypeName, Collection<File> localFiles, String dateStart, String dateEnd, ReportDefinitionDateRangeType dateRangeType) { Stopwatch stopwatch = Stopwatch.createStarted(); Class<R> reportBeanClass; try { ReportDefinitionReportType reportType = ReportDefinitionReportType.valueOf(reportTypeName); reportBeanClass = (Class<R>) this.csvReportEntitiesMapping.getReportBeanClass(reportType); } catch (IllegalArgumentException e) { reportBeanClass = (Class<R>) this.csvReportEntitiesMapping.getExperimentalReportBeanClass(reportTypeName); } if (reportBeanClass == null) { throw new IllegalArgumentException("Report type not found: " + reportTypeName); } this.processFiles(mccAccountId, reportBeanClass, localFiles, dateRangeType, dateStart, dateEnd); stopwatch.stop(); LOGGER.info("\n* DB Process finished in " + (stopwatch.elapsed(TimeUnit.MILLISECONDS) / 1000) + " seconds ***"); } /** * Deletes the local files used as temporary containers. 
* * @param localFiles the list of local files. * @param reportType the report type. */ private void deleteTemporaryFiles(Collection<File> localFiles, ReportDefinitionReportType reportType) { // Delete temporary report files LOGGER.info("\n Deleting temporary report files after Parsing..."); for (File file : localFiles) { File gUnzipFile = new File(file.getAbsolutePath() + ".gunzip"); gUnzipFile.delete(); file.delete(); LOGGER.trace("."); } LOGGER.info("\n ** Finished: " + reportType.name() + " **"); } /** * @param multipleClientReportDownloader the multipleClientReportDownloader to set */ @Autowired public void setMultipleClientReportDownloader( MultipleClientReportDownloader multipleClientReportDownloader) { this.multipleClientReportDownloader = multipleClientReportDownloader; } }
apache-2.0
intel-analytics/BigDL
scala/friesian/src/main/java/com/intel/analytics/bigdl/friesian/serving/recall/faiss/swighnswlib/MatrixStats.java
9240
/* ----------------------------------------------------------------------------
 * This file was automatically generated by SWIG (http://www.swig.org).
 * Version 3.0.12
 *
 * Do not make changes to this file unless you know what you are doing--modify
 * the SWIG interface file instead.
 * ----------------------------------------------------------------------------- */

package com.intel.analytics.bigdl.friesian.serving.recall.faiss.swighnswlib;

/**
 * SWIG-generated Java proxy for the native faiss {@code MatrixStats} struct.
 *
 * Every accessor below simply forwards to a {@code swigfaissJNI} native call,
 * passing the raw C pointer ({@code swigCPtr}) of the wrapped struct. Do not
 * edit by hand; regenerate from the SWIG interface file instead.
 */
public class MatrixStats {
  // Raw address of the underlying native faiss::MatrixStats instance.
  private transient long swigCPtr;
  // True when this proxy owns the native memory and must release it in delete().
  protected transient boolean swigCMemOwn;

  /** Wraps an existing native pointer; {@code cMemoryOwn} decides whether this proxy frees it. */
  protected MatrixStats(long cPtr, boolean cMemoryOwn) {
    swigCMemOwn = cMemoryOwn;
    swigCPtr = cPtr;
  }

  /** SWIG helper: returns the native pointer of {@code obj}, or 0 for null. */
  protected static long getCPtr(MatrixStats obj) {
    return (obj == null) ? 0 : obj.swigCPtr;
  }

  // Finalizer safety net; deterministic cleanup should go through delete().
  protected void finalize() {
    delete();
  }

  /** Releases the native struct if owned; idempotent (subsequent calls are no-ops). */
  public synchronized void delete() {
    if (swigCPtr != 0) {
      if (swigCMemOwn) {
        swigCMemOwn = false;
        swigfaissJNI.delete_MatrixStats(swigCPtr);
      }
      swigCPtr = 0;
    }
  }

  /**
   * Constructs a native MatrixStats over an n-by-d float buffer {@code x}
   * (presumably row-major, as is usual for faiss — confirm against the C++ side).
   * The resulting proxy owns the native memory.
   */
  public MatrixStats(long n, long d, SWIGTYPE_p_float x) {
    this(swigfaissJNI.new_MatrixStats(n, d, SWIGTYPE_p_float.getCPtr(x)), true);
  }

  // ---- Scalar fields of the native struct (plain JNI-backed get/set pairs) ----

  public void setComments(String value) {
    swigfaissJNI.MatrixStats_comments_set(swigCPtr, this, value);
  }

  public String getComments() {
    return swigfaissJNI.MatrixStats_comments_get(swigCPtr, this);
  }

  public void setN(long value) {
    swigfaissJNI.MatrixStats_n_set(swigCPtr, this, value);
  }

  public long getN() {
    return swigfaissJNI.MatrixStats_n_get(swigCPtr, this);
  }

  public void setD(long value) {
    swigfaissJNI.MatrixStats_d_set(swigCPtr, this, value);
  }

  public long getD() {
    return swigfaissJNI.MatrixStats_d_get(swigCPtr, this);
  }

  public void setN_collision(long value) {
    swigfaissJNI.MatrixStats_n_collision_set(swigCPtr, this, value);
  }

  public long getN_collision() {
    return swigfaissJNI.MatrixStats_n_collision_get(swigCPtr, this);
  }

  public void setN_valid(long value) {
    swigfaissJNI.MatrixStats_n_valid_set(swigCPtr, this, value);
  }

  public long getN_valid() {
    return swigfaissJNI.MatrixStats_n_valid_get(swigCPtr, this);
  }

  public void setN0(long value) {
    swigfaissJNI.MatrixStats_n0_set(swigCPtr, this, value);
  }

  public long getN0() {
    return swigfaissJNI.MatrixStats_n0_get(swigCPtr, this);
  }

  public void setMin_norm2(double value) {
    swigfaissJNI.MatrixStats_min_norm2_set(swigCPtr, this, value);
  }

  public double getMin_norm2() {
    return swigfaissJNI.MatrixStats_min_norm2_get(swigCPtr, this);
  }

  public void setMax_norm2(double value) {
    swigfaissJNI.MatrixStats_max_norm2_set(swigCPtr, this, value);
  }

  public double getMax_norm2() {
    return swigfaissJNI.MatrixStats_max_norm2_get(swigCPtr, this);
  }

  /**
   * SWIG proxy for the nested native {@code MatrixStats::PerDimStats} struct.
   * Same pointer-ownership scheme as the enclosing class.
   */
  static public class PerDimStats {
    // Raw address of the underlying native PerDimStats instance.
    private transient long swigCPtr;
    // True when this proxy owns the native memory.
    protected transient boolean swigCMemOwn;

    protected PerDimStats(long cPtr, boolean cMemoryOwn) {
      swigCMemOwn = cMemoryOwn;
      swigCPtr = cPtr;
    }

    protected static long getCPtr(PerDimStats obj) {
      return (obj == null) ? 0 : obj.swigCPtr;
    }

    protected void finalize() {
      delete();
    }

    /** Releases the native struct if owned; idempotent. */
    public synchronized void delete() {
      if (swigCPtr != 0) {
        if (swigCMemOwn) {
          swigCMemOwn = false;
          swigfaissJNI.delete_MatrixStats_PerDimStats(swigCPtr);
        }
        swigCPtr = 0;
      }
    }

    // ---- Per-dimension counters and accumulators (JNI-backed get/set pairs) ----

    public void setN(long value) {
      swigfaissJNI.MatrixStats_PerDimStats_n_set(swigCPtr, this, value);
    }

    public long getN() {
      return swigfaissJNI.MatrixStats_PerDimStats_n_get(swigCPtr, this);
    }

    public void setN_nan(long value) {
      swigfaissJNI.MatrixStats_PerDimStats_n_nan_set(swigCPtr, this, value);
    }

    public long getN_nan() {
      return swigfaissJNI.MatrixStats_PerDimStats_n_nan_get(swigCPtr, this);
    }

    public void setN_inf(long value) {
      swigfaissJNI.MatrixStats_PerDimStats_n_inf_set(swigCPtr, this, value);
    }

    public long getN_inf() {
      return swigfaissJNI.MatrixStats_PerDimStats_n_inf_get(swigCPtr, this);
    }

    public void setN0(long value) {
      swigfaissJNI.MatrixStats_PerDimStats_n0_set(swigCPtr, this, value);
    }

    public long getN0() {
      return swigfaissJNI.MatrixStats_PerDimStats_n0_get(swigCPtr, this);
    }

    public void setMin(float value) {
      swigfaissJNI.MatrixStats_PerDimStats_min_set(swigCPtr, this, value);
    }

    public float getMin() {
      return swigfaissJNI.MatrixStats_PerDimStats_min_get(swigCPtr, this);
    }

    public void setMax(float value) {
      swigfaissJNI.MatrixStats_PerDimStats_max_set(swigCPtr, this, value);
    }

    public float getMax() {
      return swigfaissJNI.MatrixStats_PerDimStats_max_get(swigCPtr, this);
    }

    public void setSum(double value) {
      swigfaissJNI.MatrixStats_PerDimStats_sum_set(swigCPtr, this, value);
    }

    public double getSum() {
      return swigfaissJNI.MatrixStats_PerDimStats_sum_get(swigCPtr, this);
    }

    public void setSum2(double value) {
      swigfaissJNI.MatrixStats_PerDimStats_sum2_set(swigCPtr, this, value);
    }

    public double getSum2() {
      return swigfaissJNI.MatrixStats_PerDimStats_sum2_get(swigCPtr, this);
    }

    public void setN_valid(long value) {
      swigfaissJNI.MatrixStats_PerDimStats_n_valid_set(swigCPtr, this, value);
    }

    public long getN_valid() {
      return swigfaissJNI.MatrixStats_PerDimStats_n_valid_get(swigCPtr, this);
    }

    public void setMean(double value) {
      swigfaissJNI.MatrixStats_PerDimStats_mean_set(swigCPtr, this, value);
    }

    public double getMean() {
      return swigfaissJNI.MatrixStats_PerDimStats_mean_get(swigCPtr, this);
    }

    public void setStddev(double value) {
      swigfaissJNI.MatrixStats_PerDimStats_stddev_set(swigCPtr, this, value);
    }

    public double getStddev() {
      return swigfaissJNI.MatrixStats_PerDimStats_stddev_get(swigCPtr, this);
    }

    /** Allocates a fresh native PerDimStats owned by this proxy. */
    public PerDimStats() {
      this(swigfaissJNI.new_MatrixStats_PerDimStats(), true);
    }

    /** Forwards one sample value {@code x} to the native accumulator. */
    public void add(float x) {
      swigfaissJNI.MatrixStats_PerDimStats_add(swigCPtr, this, x);
    }

    /** Invokes the native mean/stddev computation over the accumulated samples. */
    public void compute_mean_std() {
      swigfaissJNI.MatrixStats_PerDimStats_compute_mean_std(swigCPtr, this);
    }
  }

  // Opaque std::vector<faiss::MatrixStats::PerDimStats> — only reachable through SWIG wrappers.
  public void setPer_dim_stats(SWIGTYPE_p_std__vectorT_faiss__MatrixStats__PerDimStats_t value) {
    swigfaissJNI.MatrixStats_per_dim_stats_set(swigCPtr, this, SWIGTYPE_p_std__vectorT_faiss__MatrixStats__PerDimStats_t.getCPtr(value));
  }

  public SWIGTYPE_p_std__vectorT_faiss__MatrixStats__PerDimStats_t getPer_dim_stats() {
    long cPtr = swigfaissJNI.MatrixStats_per_dim_stats_get(swigCPtr, this);
    // Non-owning wrapper: the native vector stays owned by the enclosing struct.
    return (cPtr == 0) ? null : new SWIGTYPE_p_std__vectorT_faiss__MatrixStats__PerDimStats_t(cPtr, false);
  }

  /**
   * SWIG proxy for the nested native {@code MatrixStats::Occurrence} struct.
   * Same pointer-ownership scheme as the enclosing class.
   */
  static public class Occurrence {
    // Raw address of the underlying native Occurrence instance.
    private transient long swigCPtr;
    // True when this proxy owns the native memory.
    protected transient boolean swigCMemOwn;

    protected Occurrence(long cPtr, boolean cMemoryOwn) {
      swigCMemOwn = cMemoryOwn;
      swigCPtr = cPtr;
    }

    protected static long getCPtr(Occurrence obj) {
      return (obj == null) ? 0 : obj.swigCPtr;
    }

    protected void finalize() {
      delete();
    }

    /** Releases the native struct if owned; idempotent. */
    public synchronized void delete() {
      if (swigCPtr != 0) {
        if (swigCMemOwn) {
          swigCMemOwn = false;
          swigfaissJNI.delete_MatrixStats_Occurrence(swigCPtr);
        }
        swigCPtr = 0;
      }
    }

    public void setFirst(long value) {
      swigfaissJNI.MatrixStats_Occurrence_first_set(swigCPtr, this, value);
    }

    public long getFirst() {
      return swigfaissJNI.MatrixStats_Occurrence_first_get(swigCPtr, this);
    }

    public void setCount(long value) {
      swigfaissJNI.MatrixStats_Occurrence_count_set(swigCPtr, this, value);
    }

    public long getCount() {
      return swigfaissJNI.MatrixStats_Occurrence_count_get(swigCPtr, this);
    }

    /** Allocates a fresh native Occurrence owned by this proxy. */
    public Occurrence() {
      this(swigfaissJNI.new_MatrixStats_Occurrence(), true);
    }
  }

  // Opaque std::unordered_map<unsigned long, faiss::MatrixStats::Occurrence>.
  public void setOccurrences(SWIGTYPE_p_std__unordered_mapT_unsigned_long_faiss__MatrixStats__Occurrence_t value) {
    swigfaissJNI.MatrixStats_occurrences_set(swigCPtr, this, SWIGTYPE_p_std__unordered_mapT_unsigned_long_faiss__MatrixStats__Occurrence_t.getCPtr(value));
  }

  public SWIGTYPE_p_std__unordered_mapT_unsigned_long_faiss__MatrixStats__Occurrence_t getOccurrences() {
    long cPtr = swigfaissJNI.MatrixStats_occurrences_get(swigCPtr, this);
    // Non-owning wrapper: the native map stays owned by the enclosing struct.
    return (cPtr == 0) ? null : new SWIGTYPE_p_std__unordered_mapT_unsigned_long_faiss__MatrixStats__Occurrence_t(cPtr, false);
  }

  public void setBuf(String value) {
    swigfaissJNI.MatrixStats_buf_set(swigCPtr, this, value);
  }

  public String getBuf() {
    return swigfaissJNI.MatrixStats_buf_get(swigCPtr, this);
  }

  public void setNbuf(long value) {
    swigfaissJNI.MatrixStats_nbuf_set(swigCPtr, this, value);
  }

  public long getNbuf() {
    return swigfaissJNI.MatrixStats_nbuf_get(swigCPtr, this);
  }

  /** Appends a comment via the native printf-style helper ({@code fmt} is the format string). */
  public void do_comment(String fmt) {
    swigfaissJNI.MatrixStats_do_comment(swigCPtr, this, fmt);
  }

}
apache-2.0
nitincchauhan/HikariCP
src/test/java/com/zaxxer/hikari/pool/SaturatedPoolTest830.java
6047
/*
 * Copyright (C) 2017 Brett Wooldridge
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.zaxxer.hikari.pool;

import static com.zaxxer.hikari.pool.TestElf.getConcurrentBag;
import static com.zaxxer.hikari.pool.TestElf.getPool;
import static com.zaxxer.hikari.pool.TestElf.newHikariConfig;
import static com.zaxxer.hikari.pool.TestElf.setSlf4jLogLevel;
import static com.zaxxer.hikari.util.ClockSource.currentTime;
import static com.zaxxer.hikari.util.ClockSource.elapsedMillis;
import static com.zaxxer.hikari.util.UtilityElf.quietlySleep;
import static java.lang.Math.round;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertEquals;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.logging.log4j.Level;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import com.zaxxer.hikari.mocks.StubConnection;
import com.zaxxer.hikari.mocks.StubStatement;

/**
 * Regression test for pool saturation behavior (issue #830): floods a small
 * pool with more clients than connections, then backs off the submission rate
 * until connection-timeout failures stop, and asserts the system settled at
 * the expected steady-state request rate.
 *
 * @author Brett Wooldridge
 */
public class SaturatedPoolTest830
{
   private static final Logger LOGGER = LoggerFactory.getLogger(SaturatedPoolTest830.class);
   // Pool is capped at 10 connections while up to 50 client threads compete for them.
   private static final int MAX_POOL_SIZE = 10;

   @Test
   public void saturatedPoolTest() throws Exception
   {
      // Pool deliberately smaller than the client thread count, with a short
      // 1s connection timeout so saturation produces observable SQLExceptions.
      HikariConfig config = newHikariConfig();
      config.setMinimumIdle(5);
      config.setMaximumPoolSize(MAX_POOL_SIZE);
      config.setInitializationFailTimeout(Long.MAX_VALUE);
      config.setConnectionTimeout(1000);
      config.setDataSourceClassName("com.zaxxer.hikari.mocks.StubDataSource");

      // Every simulated query holds its connection for ~1s, so 10 connections
      // can sustain at most ~10 requests/second.
      StubConnection.slowCreate = true;
      StubStatement.setSimulatedQueryTime(1000);
      setSlf4jLogLevel(HikariPool.class, Level.DEBUG);
      System.setProperty("com.zaxxer.hikari.housekeeping.periodMs", "5000");

      final long start = currentTime();
      try (final HikariDataSource ds = new HikariDataSource(config)) {
         // 50 fixed threads; CallerRunsPolicy means overflow work executes on
         // the submitting (test) thread instead of being rejected.
         LinkedBlockingQueue<Runnable> queue = new LinkedBlockingQueue<>();
         ThreadPoolExecutor threadPool = new ThreadPoolExecutor(
            50 /*core*/, 50 /*max*/, 2 /*keepalive*/, SECONDS, queue,
            new ThreadPoolExecutor.CallerRunsPolicy());
         threadPool.allowCoreThreadTimeOut(true);

         // Sliding window of the last 100 request outcomes; true = timed out.
         // Starts all-true so the loop below cannot exit before real samples arrive.
         AtomicInteger windowIndex = new AtomicInteger();
         boolean[] failureWindow = new boolean[100];
         Arrays.fill(failureWindow, true);

         // Initial saturation
         for (int i = 0; i < 50; i++) {
            threadPool.execute(() -> {
               try (Connection conn = ds.getConnection();
                    Statement stmt = conn.createStatement()) {
                  stmt.execute("SELECT bogus FROM imaginary");
               }
               catch (SQLException e) {
                  LOGGER.info(e.getMessage());
               }
            });
         }

         // Submission interval in ms; 80ms = 12.5 req/s, then eased to 10 req/s.
         long sleep = 80;
         outer: while (true) {
            quietlySleep(sleep);

            // Step the submission rate down over time: 12.5/s -> ~11/s -> 10/s.
            if (elapsedMillis(start) > SECONDS.toMillis(12) && sleep < 100) {
               sleep = 100;
               LOGGER.warn("Switching to 100ms sleep");
            }
            else if (elapsedMillis(start) > SECONDS.toMillis(6) && sleep < 90) {
               sleep = 90;
               LOGGER.warn("Switching to 90ms sleep");
            }

            // One probe request per interval; records success/failure in the window.
            threadPool.execute(() -> {
               int ndx = windowIndex.incrementAndGet() % failureWindow.length;
               try (Connection conn = ds.getConnection();
                    Statement stmt = conn.createStatement()) {
                  stmt.execute("SELECT bogus FROM imaginary");
                  failureWindow[ndx] = false;
               }
               catch (SQLException e) {
                  LOGGER.info(e.getMessage());
                  failureWindow[ndx] = true;
               }
            });

            // Keep looping while any of the last 100 outcomes was a timeout;
            // the modulo guard just throttles the progress logging to ~1/s.
            for (int i = 0; i < failureWindow.length; i++) {
               if (failureWindow[i]) {
                  if (elapsedMillis(start) % (SECONDS.toMillis(1) - sleep) < sleep) {
                     LOGGER.info("Active threads {}, submissions per second {}, waiting threads {}",
                                 threadPool.getActiveCount(),
                                 SECONDS.toMillis(1) / sleep,
                                 getPool(ds).getThreadsAwaitingConnection());
                  }
                  continue outer;
               }
            }

            // Window is all-success: the pool has recovered from saturation.
            LOGGER.info("Timeouts have subsided.");
            LOGGER.info("Active threads {}, submissions per second {}, waiting threads {}",
                        threadPool.getActiveCount(),
                        SECONDS.toMillis(1) / sleep,
                        getPool(ds).getThreadsAwaitingConnection());
            break;
         }

         // Drain: wait for in-flight queries to release their connections.
         LOGGER.info("Waiting for completion of {} active tasks.", threadPool.getActiveCount());
         while (getPool(ds).getActiveConnections() > 0) {
            quietlySleep(50);
         }

         // Recovery must have occurred at the sustainable rate (100ms interval = 10 req/s).
         assertEquals("Rate not in balance at 10req/s", SECONDS.toMillis(1) / sleep, 10L);
      }
      finally {
         // Restore global stub/log state so other tests are unaffected.
         StubStatement.setSimulatedQueryTime(0);
         StubConnection.slowCreate = false;
         System.clearProperty("com.zaxxer.hikari.housekeeping.periodMs");
         setSlf4jLogLevel(HikariPool.class, Level.INFO);
      }
   }
}
apache-2.0
apache/incubator-systemml
src/main/java/org/apache/sysds/runtime/matrix/data/LibMatrixCuDNNInputRowFetcher.java
3129
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sysds.runtime.matrix.data;

import static jcuda.runtime.JCuda.cudaMemset;

import org.apache.sysds.runtime.DMLRuntimeException;
import org.apache.sysds.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysds.runtime.instructions.gpu.context.CSRPointer;
import org.apache.sysds.runtime.instructions.gpu.context.GPUContext;

import jcuda.Pointer;

/**
 * Performs a slice operation: out = in[(n+1):(n+1), 1:numColumns]
 *
 * Fetches one row at a time of an input matrix into a single reusable dense
 * GPU buffer, handling both dense and sparse (CSR) input layouts. The buffer
 * is allocated once in the constructor and freed in {@link #close()}, so
 * instances should be used in a try-with-resources block.
 */
public class LibMatrixCuDNNInputRowFetcher extends LibMatrixCUDA implements java.lang.AutoCloseable {
	GPUContext gCtx;
	String instName;
	// Number of columns of the input matrix == length of each fetched row.
	int numColumns;
	// Chooses the sparse vs. dense slice path in getNthRow().
	boolean isInputInSparseFormat;
	Object inPointer; // can be either CSRPointer or Pointer
	// Reusable device buffer holding the most recently fetched row.
	Pointer outPointer;

	/**
	 * Initialize the input fetcher
	 *
	 * @param gCtx current gpu context
	 * @param instName name of the instruction
	 * @param image input matrix object.
	 */
	public LibMatrixCuDNNInputRowFetcher(GPUContext gCtx, String instName, MatrixObject image) {
		this.gCtx = gCtx;
		this.instName = instName;
		numColumns = LibMatrixCUDA.toInt(image.getNumColumns());
		isInputInSparseFormat = LibMatrixCUDA.isInSparseFormat(gCtx, image);
		// Grab the matching device pointer type up front; reused for every row fetch.
		inPointer = isInputInSparseFormat ? LibMatrixCUDA.getSparsePointer(gCtx, image, instName)
			: LibMatrixCuDNN.getDensePointerForCuDNN(gCtx, image, instName);
		// One row's worth of dense storage (sizeOfDataType comes from LibMatrixCUDA).
		outPointer = gCtx.allocate(instName, numColumns*sizeOfDataType);
	}
	/**
	 * Copy the nth row and return the dense pointer
	 * @param n zero-based row index
	 * @return dense pointer containing the nth row. This row is reused in the next iteration
	 */
	public Pointer getNthRow(int n) {
		if(isInputInSparseFormat) {
			// Sparse path: zero the buffer first since a CSR row only writes its
			// non-zero entries. The surrounding synchronizes presumably fence the
			// memset against in-flight kernels on other streams — TODO confirm.
			jcuda.runtime.JCuda.cudaDeviceSynchronize();
			cudaMemset(outPointer, 0, numColumns*sizeOfDataType);
			jcuda.runtime.JCuda.cudaDeviceSynchronize();
			LibMatrixCUDA.sliceSparseDense(gCtx, instName, (CSRPointer)inPointer, outPointer, n, n, 0, LibMatrixCUDA.toInt(numColumns-1), numColumns);
		}
		else {
			// Dense path: a row slice overwrites the whole buffer, so no memset needed.
			LibMatrixCUDA.sliceDenseDense(gCtx, instName, (Pointer)inPointer, outPointer, n, n, 0, LibMatrixCUDA.toInt(numColumns-1), numColumns);
		}
		return outPointer;
	}
	/**
	 * Deallocates temporary pointer
	 */
	@Override
	public void close() {
		try {
			// Only outPointer is owned here; inPointer belongs to the MatrixObject.
			gCtx.cudaFreeHelper(null, outPointer, true);
		} catch (DMLRuntimeException e) {
			// close() cannot throw a checked exception; surface as unchecked with cause preserved.
			throw new RuntimeException(e);
		}
	}
}
apache-2.0
yanzhijun/jclouds-aliyun
apis/route53/src/main/java/org/jclouds/route53/Route53ApiMetadata.java
2718
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.route53; import static org.jclouds.aws.reference.AWSConstants.PROPERTY_AUTH_TAG; import static org.jclouds.aws.reference.AWSConstants.PROPERTY_HEADER_TAG; import java.net.URI; import java.util.Properties; import org.jclouds.apis.ApiMetadata; import org.jclouds.rest.internal.BaseHttpApiMetadata; import org.jclouds.route53.config.Route53HttpApiModule; import com.google.auto.service.AutoService; /** * Implementation of {@link ApiMetadata} for Amazon's Route53 api. 
*/ @AutoService(ApiMetadata.class) public class Route53ApiMetadata extends BaseHttpApiMetadata<Route53Api> { @Override public Builder toBuilder() { return new Builder().fromApiMetadata(this); } public Route53ApiMetadata() { this(new Builder()); } protected Route53ApiMetadata(Builder builder) { super(Builder.class.cast(builder)); } public static Properties defaultProperties() { Properties properties = BaseHttpApiMetadata.defaultProperties(); properties.setProperty(PROPERTY_AUTH_TAG, "AWS"); properties.setProperty(PROPERTY_HEADER_TAG, "amz"); return properties; } public static class Builder extends BaseHttpApiMetadata.Builder<Route53Api, Builder> { protected Builder() { id("route53") .name("Amazon Route 53 Api") .identityName("Access Key ID") .credentialName("Secret Access Key") .version("2012-02-29") .documentation(URI.create("http://docs.aws.amazon.com/Route53/latest/APIReference/")) .defaultEndpoint("https://route53.amazonaws.com") .defaultProperties(Route53ApiMetadata.defaultProperties()) .defaultModule(Route53HttpApiModule.class); } @Override public Route53ApiMetadata build() { return new Route53ApiMetadata(this); } @Override protected Builder self() { return this; } } }
apache-2.0
juanavelez/hazelcast
hazelcast/src/main/java/com/hazelcast/multimap/impl/MultiMapProxySupport.java
10958
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.multimap.impl;

import com.hazelcast.concurrent.lock.LockProxySupport;
import com.hazelcast.concurrent.lock.LockServiceImpl;
import com.hazelcast.config.MultiMapConfig;
import com.hazelcast.core.EntryEventType;
import com.hazelcast.multimap.impl.operations.CountOperation;
import com.hazelcast.multimap.impl.operations.GetAllOperation;
import com.hazelcast.multimap.impl.operations.MultiMapOperationFactory;
import com.hazelcast.multimap.impl.operations.MultiMapOperationFactory.OperationFactoryType;
import com.hazelcast.multimap.impl.operations.MultiMapResponse;
import com.hazelcast.multimap.impl.operations.PutOperation;
import com.hazelcast.multimap.impl.operations.RemoveAllOperation;
import com.hazelcast.multimap.impl.operations.RemoveOperation;
import com.hazelcast.nio.serialization.Data;
import com.hazelcast.spi.AbstractDistributedObject;
import com.hazelcast.spi.DistributedObjectNamespace;
import com.hazelcast.spi.NodeEngine;
import com.hazelcast.spi.Operation;
import com.hazelcast.util.ExceptionUtil;
import com.hazelcast.util.ThreadUtil;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Future;

/**
 * Shared invocation plumbing for MultiMap proxies.
 *
 * Key-addressed operations (put/get/remove/count) are dispatched to the single
 * partition that owns the serialized key; aggregate operations (keySet/values/
 * entrySet/contains/size/clear) fan out to all partitions via an operation
 * factory and merge the per-partition results. All checked failures are
 * rethrown unchecked through {@link ExceptionUtil#rethrow(Throwable)}.
 */
public abstract class MultiMapProxySupport extends AbstractDistributedObject<MultiMapService> {

    // Per-name MultiMap configuration (also gates statistics collection in invoke()).
    protected final MultiMapConfig config;
    // Distributed object name; identifies the MultiMap across the cluster.
    protected final String name;
    // Delegate implementing the ILock-style lock operations for this MultiMap's keys.
    protected final LockProxySupport lockSupport;

    protected MultiMapProxySupport(MultiMapService service, NodeEngine nodeEngine, String name) {
        super(nodeEngine, service);
        this.config = nodeEngine.getConfig().findMultiMapConfig(name);
        this.name = name;
        lockSupport = new LockProxySupport(new DistributedObjectNamespace(MultiMapService.SERVICE_NAME, name),
                LockServiceImpl.getMaxLeaseTimeInMillis(nodeEngine.getProperties()));
    }

    @Override
    public String getName() {
        return name;
    }

    /**
     * Adds a value for the given key on its owning partition.
     * index is the insertion position (list-backed multimaps; -1 presumably
     * means append — TODO confirm against PutOperation).
     */
    protected Boolean putInternal(Data dataKey, Data dataValue, int index) {
        try {
            PutOperation operation = new PutOperation(name, dataKey, getThreadId(), dataValue, index);
            return invoke(operation, dataKey);
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    /** Fetches all values mapped to the key from its owning partition. */
    protected MultiMapResponse getAllInternal(Data dataKey) {
        try {
            GetAllOperation operation = new GetAllOperation(name, dataKey);
            operation.setThreadId(ThreadUtil.getThreadId());
            return invoke(operation, dataKey);
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    /** Removes a single key/value pairing; returns whether anything was removed. */
    protected Boolean removeInternal(Data dataKey, Data dataValue) {
        try {
            RemoveOperation operation = new RemoveOperation(name, dataKey, getThreadId(), dataValue);
            return invoke(operation, dataKey);
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    /** Removes every value mapped to the key; the response carries the removed values. */
    protected MultiMapResponse removeInternal(Data dataKey) {
        try {
            RemoveAllOperation operation = new RemoveAllOperation(name, dataKey, getThreadId());
            return invoke(operation, dataKey);
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    /** Keys owned by partitions local to this member only (no remote calls). */
    protected Set<Data> localKeySetInternal() {
        return getService().localKeySet(name);
    }

    /** Collects keys from every partition and merges them into one set. */
    protected Set<Data> keySetInternal() {
        final NodeEngine nodeEngine = getNodeEngine();
        try {
            Map<Integer, Object> results = nodeEngine.getOperationService()
                    .invokeOnAllPartitions(
                            MultiMapService.SERVICE_NAME,
                            new MultiMapOperationFactory(name, OperationFactoryType.KEY_SET)
                    );
            Set<Data> keySet = new HashSet<Data>();
            for (Object result : results.values()) {
                // Partitions with no data for this MultiMap may report null.
                if (result == null) {
                    continue;
                }
                MultiMapResponse response = nodeEngine.toObject(result);
                if (response.getCollection() != null) {
                    keySet.addAll(response.getCollection());
                }
            }
            return keySet;
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    /** Raw per-partition VALUES results keyed by partition id; merged by the caller. */
    protected Map valuesInternal() {
        final NodeEngine nodeEngine = getNodeEngine();
        try {
            Map<Integer, Object> results = nodeEngine.getOperationService()
                    .invokeOnAllPartitions(
                            MultiMapService.SERVICE_NAME,
                            new MultiMapOperationFactory(name, OperationFactoryType.VALUES)
                    );
            return results;
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    /** Raw per-partition ENTRY_SET results keyed by partition id; merged by the caller. */
    protected Map entrySetInternal() {
        final NodeEngine nodeEngine = getNodeEngine();
        try {
            Map<Integer, Object> results = nodeEngine.getOperationService()
                    .invokeOnAllPartitions(
                            MultiMapService.SERVICE_NAME,
                            new MultiMapOperationFactory(name, OperationFactoryType.ENTRY_SET)
                    );
            return results;
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    /**
     * True if any partition contains the key and/or value.
     * Either argument may be null, depending on which containment variant the
     * concrete proxy is implementing (containsKey/containsValue/containsEntry).
     */
    protected boolean containsInternal(Data key, Data value) {
        final NodeEngine nodeEngine = getNodeEngine();
        try {
            Map<Integer, Object> results = nodeEngine.getOperationService()
                    .invokeOnAllPartitions(
                            MultiMapService.SERVICE_NAME,
                            new MultiMapOperationFactory(name, OperationFactoryType.CONTAINS,
                                    key, value, ThreadUtil.getThreadId())
                    );
            for (Object obj : results.values()) {
                if (obj == null) {
                    continue;
                }
                Boolean result = nodeEngine.toObject(obj);
                if (result) {
                    // Short-circuit on the first partition that reports a match.
                    return true;
                }
            }
            return false;
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    /** Total number of key/value pairings, summed over all partitions. */
    public int size() {
        final NodeEngine nodeEngine = getNodeEngine();
        try {
            Map<Integer, Object> results = nodeEngine.getOperationService()
                    .invokeOnAllPartitions(
                            MultiMapService.SERVICE_NAME,
                            new MultiMapOperationFactory(name, OperationFactoryType.SIZE)
                    );
            int size = 0;
            for (Object obj : results.values()) {
                if (obj == null) {
                    continue;
                }
                Integer result = nodeEngine.toObject(obj);
                size += result;
            }
            return size;
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    /** Clears all partitions, then publishes a single CLEAR_ALL event with the total count. */
    public void clear() {
        final NodeEngine nodeEngine = getNodeEngine();
        try {
            final Map<Integer, Object> resultMap = nodeEngine.getOperationService().invokeOnAllPartitions(
                    MultiMapService.SERVICE_NAME,
                    new MultiMapOperationFactory(name, OperationFactoryType.CLEAR)
            );
            int numberOfAffectedEntries = 0;
            for (Object o : resultMap.values()) {
                numberOfAffectedEntries += (Integer) o;
            }
            publishMultiMapEvent(numberOfAffectedEntries, EntryEventType.CLEAR_ALL);
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    private void publishMultiMapEvent(int numberOfAffectedEntries, EntryEventType eventType) {
        getService().publishMultiMapEvent(name, eventType, numberOfAffectedEntries);
    }

    /** Number of values mapped to the key, resolved on its owning partition. */
    protected Integer countInternal(Data dataKey) {
        try {
            CountOperation operation = new CountOperation(name, dataKey);
            operation.setThreadId(ThreadUtil.getThreadId());
            return invoke(operation, dataKey);
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    @Override
    public String getServiceName() {
        return MultiMapService.SERVICE_NAME;
    }

    /**
     * Synchronously invokes the operation on the partition owning dataKey and
     * deserializes the response. When statistics are enabled, also records the
     * end-to-end latency (nanoTime delta) into the local stats for put/remove/get
     * operation types.
     */
    private <T> T invoke(Operation operation, Data dataKey) {
        final NodeEngine nodeEngine = getNodeEngine();
        try {
            int partitionId = nodeEngine.getPartitionService().getPartitionId(dataKey);
            Future f;
            Object o;
            if (config.isStatisticsEnabled()) {
                long startTimeNanos = System.nanoTime();
                f = nodeEngine.getOperationService()
                        .invokeOnPartition(MultiMapService.SERVICE_NAME, operation, partitionId);
                o = f.get();
                if (operation instanceof PutOperation) {
                    //TODO @ali should we remove statics from operations ?
                    getService().getLocalMultiMapStatsImpl(name).incrementPutLatencyNanos(System.nanoTime() - startTimeNanos);
                } else if (operation instanceof RemoveOperation || operation instanceof RemoveAllOperation) {
                    getService().getLocalMultiMapStatsImpl(name).incrementRemoveLatencyNanos(System.nanoTime() - startTimeNanos);
                } else if (operation instanceof GetAllOperation) {
                    getService().getLocalMultiMapStatsImpl(name).incrementGetLatencyNanos(System.nanoTime() - startTimeNanos);
                }
            } else {
                f = nodeEngine.getOperationService()
                        .invokeOnPartition(MultiMapService.SERVICE_NAME, operation, partitionId);
                o = f.get();
            }
            return nodeEngine.toObject(o);
        } catch (Throwable throwable) {
            throw ExceptionUtil.rethrow(throwable);
        }
    }

    // Caller thread id, used for lock/ownership semantics of key-level operations.
    private long getThreadId() {
        return ThreadUtil.getThreadId();
    }

    @Override
    public String toString() {
        return "MultiMap{name=" + name + '}';
    }
}
apache-2.0
flowable/flowable-engine
modules/flowable-cmmn-engine/src/test/java/org/flowable/cmmn/test/runtime/HumanTaskTest.java
20270
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.cmmn.test.runtime; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.tuple; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.flowable.cmmn.api.history.HistoricPlanItemInstance; import org.flowable.cmmn.api.repository.CaseDefinition; import org.flowable.cmmn.api.runtime.CaseInstance; import org.flowable.cmmn.api.runtime.PlanItemDefinitionType; import org.flowable.cmmn.api.runtime.PlanItemInstance; import org.flowable.cmmn.engine.test.CmmnDeployment; import org.flowable.cmmn.engine.test.FlowableCmmnTestCase; import org.flowable.cmmn.engine.test.impl.CmmnHistoryTestHelper; import org.flowable.cmmn.engine.test.impl.CmmnTestHelper; import org.flowable.common.engine.api.constant.ReferenceTypes; import org.flowable.common.engine.api.scope.ScopeTypes; import org.flowable.common.engine.impl.history.HistoryLevel; import org.flowable.common.engine.impl.identity.Authentication; import org.flowable.entitylink.api.EntityLink; import org.flowable.entitylink.api.EntityLinkType; import org.flowable.entitylink.api.HierarchyType; import org.flowable.entitylink.api.history.HistoricEntityLink; import org.flowable.identitylink.api.IdentityLink; import org.flowable.identitylink.api.IdentityLinkType; import org.flowable.identitylink.api.history.HistoricIdentityLink; import org.flowable.task.api.Task; import 
org.flowable.task.api.history.HistoricTaskInstance; import org.junit.Test; import com.fasterxml.jackson.databind.node.ArrayNode; /** * @author Tijs Rademakers * @author Joram Barrez */ public class HumanTaskTest extends FlowableCmmnTestCase { @Test @CmmnDeployment public void testHumanTask() { Authentication.setAuthenticatedUserId("JohnDoe"); CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder() .caseDefinitionKey("myCase") .start(); assertThat(caseInstance).isNotNull(); Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult(); assertThat(task.getName()).isEqualTo("Task One"); assertThat(task.getAssignee()).isEqualTo("JohnDoe"); String task1Id = task.getId(); List<EntityLink> entityLinks = cmmnRuntimeService.getEntityLinkChildrenForCaseInstance(caseInstance.getId()); assertThat(entityLinks).hasSize(1); EntityLink entityLink = entityLinks.get(0); assertThat(entityLink.getLinkType()).isEqualTo(EntityLinkType.CHILD); assertThat(entityLink.getCreateTime()).isNotNull(); assertThat(entityLink.getScopeId()).isEqualTo(caseInstance.getId()); assertThat(entityLink.getScopeType()).isEqualTo(ScopeTypes.CMMN); assertThat(entityLink.getScopeDefinitionId()).isNull(); assertThat(entityLink.getReferenceScopeId()).isEqualTo(task.getId()); assertThat(entityLink.getReferenceScopeType()).isEqualTo(ScopeTypes.TASK); assertThat(entityLink.getReferenceScopeDefinitionId()).isNull(); assertThat(entityLink.getHierarchyType()).isEqualTo(HierarchyType.ROOT); assertThat(cmmnTaskService.getIdentityLinksForTask(task1Id)) .extracting(IdentityLink::getType, IdentityLink::getUserId, IdentityLink::getGroupId) .containsExactlyInAnyOrder( tuple("assignee", "JohnDoe", null) ); PlanItemInstance taskPlanItemInstance = cmmnRuntimeService.createPlanItemInstanceQuery() .planItemDefinitionType(PlanItemDefinitionType.HUMAN_TASK) .planItemInstanceStateActive() .singleResult(); assertThat(taskPlanItemInstance).isNotNull(); 
assertThat(taskPlanItemInstance.getReferenceId()).isEqualTo(task.getId()); assertThat(taskPlanItemInstance.getReferenceType()).isEqualTo(ReferenceTypes.PLAN_ITEM_CHILD_HUMAN_TASK); if (CmmnHistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, cmmnEngineConfiguration)) { HistoricPlanItemInstance historicTaskPlanItemInstance = cmmnHistoryService.createHistoricPlanItemInstanceQuery() .planItemInstanceId(taskPlanItemInstance.getId()) .singleResult(); assertThat(historicTaskPlanItemInstance.getReferenceId()).isEqualTo(task.getId()); assertThat(historicTaskPlanItemInstance.getReferenceType()).isEqualTo(ReferenceTypes.PLAN_ITEM_CHILD_HUMAN_TASK); } cmmnTaskService.complete(task.getId()); task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult(); assertThat(task.getName()).isEqualTo("Task Two"); assertThat(task.getAssignee()).isNull(); String task2Id = task.getId(); task = cmmnTaskService.createTaskQuery().taskCandidateGroup("test").caseInstanceId(caseInstance.getId()).singleResult(); assertThat(task.getName()).isEqualTo("Task Two"); task = cmmnTaskService.createTaskQuery().taskCandidateUser("test2").caseInstanceId(caseInstance.getId()).singleResult(); assertThat(task.getName()).isEqualTo("Task Two"); assertThat(cmmnTaskService.getIdentityLinksForTask(task2Id)) .extracting(IdentityLink::getType, IdentityLink::getUserId, IdentityLink::getGroupId) .containsExactlyInAnyOrder( tuple("candidate", "test", null), tuple("candidate", "test2", null), tuple("candidate", null, "test") ); cmmnTaskService.complete(task.getId()); assertThat(cmmnRuntimeService.createCaseInstanceQuery().count()).isZero(); if (CmmnHistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, cmmnEngineConfiguration)) { assertThat(cmmnHistoryService.createHistoricVariableInstanceQuery() .caseInstanceId(caseInstance.getId()) .variableName("var1") .singleResult().getValue()).isEqualTo("JohnDoe"); List<HistoricEntityLink> historicEntityLinks = 
cmmnHistoryService.getHistoricEntityLinkChildrenForCaseInstance(caseInstance.getId()); for (HistoricEntityLink historicEntityLink : historicEntityLinks) { assertThat(historicEntityLink.getLinkType()).isEqualTo(EntityLinkType.CHILD); assertThat(historicEntityLink.getCreateTime()).isNotNull(); assertThat(historicEntityLink.getScopeId()).isEqualTo(caseInstance.getId()); assertThat(historicEntityLink.getScopeType()).isEqualTo(ScopeTypes.CMMN); assertThat(historicEntityLink.getScopeDefinitionId()).isNull(); assertThat(historicEntityLink.getReferenceScopeType()).isEqualTo(ScopeTypes.TASK); assertThat(historicEntityLink.getReferenceScopeDefinitionId()).isNull(); assertThat(entityLink.getHierarchyType()).isEqualTo(HierarchyType.ROOT); } assertThat(historicEntityLinks) .extracting(HistoricEntityLink::getReferenceScopeId) .containsExactlyInAnyOrder(task1Id, task2Id); assertThat(cmmnHistoryService.getHistoricIdentityLinksForTask(task1Id)) .extracting(HistoricIdentityLink::getType, HistoricIdentityLink::getUserId, HistoricIdentityLink::getGroupId) .containsExactlyInAnyOrder( tuple("assignee", "JohnDoe", null) ); assertThat(cmmnHistoryService.getHistoricIdentityLinksForTask(task2Id)) .extracting(HistoricIdentityLink::getType, HistoricIdentityLink::getUserId, HistoricIdentityLink::getGroupId) .containsExactlyInAnyOrder( tuple("candidate", "test", null), tuple("candidate", "test2", null), tuple("candidate", null, "test") ); } Authentication.setAuthenticatedUserId(null); } @Test public void testCreateHumanTaskUnderTenantByKey() { Authentication.setAuthenticatedUserId("JohnDoe"); org.flowable.cmmn.api.repository.CmmnDeployment deployment = cmmnRepositoryService.createDeployment().tenantId("flowable"). 
addClasspathResource("org/flowable/cmmn/test/runtime/HumanTaskTest.testHumanTask.cmmn").deploy(); try { assertThat(deployment.getTenantId()).isEqualTo("flowable"); CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder() .caseDefinitionKey("myCase") .tenantId("flowable") .start(); assertThat(caseInstance).isNotNull(); assertThat(caseInstance.getTenantId()).isEqualTo("flowable"); Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult(); assertThat(task.getName()).isEqualTo("Task One"); assertThat(task.getAssignee()).isEqualTo("JohnDoe"); assertThat(task.getTenantId()).isEqualTo("flowable"); cmmnTaskService.complete(task.getId()); task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult(); assertThat(task.getName()).isEqualTo("Task Two"); assertThat(task.getTenantId()).isEqualTo("flowable"); cmmnTaskService.complete(task.getId()); assertThat(cmmnRuntimeService.createCaseInstanceQuery().count()).isZero(); } finally { CmmnTestHelper.deleteDeployment(cmmnEngineConfiguration, deployment.getId()); Authentication.setAuthenticatedUserId(null); } } @Test public void testCreateHumanTaskUnderTenantById() { Authentication.setAuthenticatedUserId("JohnDoe"); org.flowable.cmmn.api.repository.CmmnDeployment deployment = cmmnRepositoryService.createDeployment().tenantId("flowable"). 
addClasspathResource("org/flowable/cmmn/test/runtime/HumanTaskTest.testHumanTask.cmmn").deploy(); try { assertThat(deployment.getTenantId()).isEqualTo("flowable"); CaseDefinition caseDefinition = cmmnRepositoryService.createCaseDefinitionQuery().deploymentId(deployment.getId()).singleResult(); assertThat(caseDefinition.getTenantId()).isEqualTo("flowable"); CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder() .caseDefinitionId(caseDefinition.getId()) .tenantId("flowable") .start(); assertThat(caseInstance).isNotNull(); assertThat(caseInstance.getTenantId()).isEqualTo("flowable"); Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult(); assertThat(task.getName()).isEqualTo("Task One"); assertThat(task.getAssignee()).isEqualTo("JohnDoe"); assertThat(task.getTenantId()).isEqualTo("flowable"); cmmnTaskService.complete(task.getId()); task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult(); assertThat(task.getTenantId()).isEqualTo("flowable"); cmmnTaskService.complete(task.getId()); assertThat(cmmnRuntimeService.createCaseInstanceQuery().count()).isZero(); } finally { CmmnTestHelper.deleteDeployment(cmmnEngineConfiguration, deployment.getId()); Authentication.setAuthenticatedUserId(null); } } @Test @CmmnDeployment public void testTaskCompletionExitsStage() { CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder() .caseDefinitionKey("humanTaskCompletionExits") .start(); assertThat(caseInstance).isNotNull(); List<Task> tasks = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).orderByTaskName().asc().list(); assertThat(tasks) .extracting(Task::getName) .containsExactly("A", "B", "C"); // Completing A should delete B and C cmmnTaskService.complete(tasks.get(0).getId()); assertThat(cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).count()).isZero(); assertCaseInstanceEnded(caseInstance); if 
(CmmnHistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, cmmnEngineConfiguration)) { List<HistoricTaskInstance> historicTaskInstances = cmmnHistoryService.createHistoricTaskInstanceQuery().list(); assertThat(historicTaskInstances).hasSize(3); for (HistoricTaskInstance historicTaskInstance : historicTaskInstances) { assertThat(historicTaskInstance.getStartTime()).isNotNull(); assertThat(historicTaskInstance.getEndTime()).isNotNull(); if (!"A".equals(historicTaskInstance.getName())) { assertThat(historicTaskInstance.getDeleteReason()).isEqualTo("cmmn-state-transition-terminate-case"); } } } // Completing C should delete B CaseInstance caseInstance2 = cmmnRuntimeService.createCaseInstanceBuilder() .caseDefinitionKey("humanTaskCompletionExits") .start(); assertThat(caseInstance2).isNotNull(); tasks = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance2.getId()).orderByTaskName().asc().list(); cmmnTaskService.complete(tasks.get(2).getId()); Task taskA = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance2.getId()).orderByTaskName().asc().singleResult(); assertThat(taskA).isNotNull(); cmmnTaskService.complete(taskA.getId()); assertCaseInstanceEnded(caseInstance2); if (CmmnHistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, cmmnEngineConfiguration)) { List<HistoricTaskInstance> historicTaskInstances = cmmnHistoryService.createHistoricTaskInstanceQuery().caseInstanceId(caseInstance2.getId()).list(); assertThat(historicTaskInstances).hasSize(3); for (HistoricTaskInstance historicTaskInstance : historicTaskInstances) { assertThat(historicTaskInstance.getStartTime()).isNotNull(); assertThat(historicTaskInstance.getEndTime()).isNotNull(); if ("B".equals(historicTaskInstance.getName())) { assertThat(historicTaskInstance.getDeleteReason()).isEqualTo("cmmn-state-transition-exit"); } } } } @Test @CmmnDeployment(resources = "org/flowable/cmmn/test/runtime/HumanTaskTest.testHumanTask.cmmn") public void 
addCompleteAuthenticatedUserAsParticipantToParentCase() { CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder() .caseDefinitionKey("myCase") .start(); assertThat(caseInstance).isNotNull(); Task task = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).singleResult(); assertThat(task.getName()).isEqualTo("Task One"); assertThat(task.getAssignee()).isNull(); assertThat(cmmnRuntimeService.getIdentityLinksForCaseInstance(caseInstance.getId())).isEmpty(); String prevUserId = Authentication.getAuthenticatedUserId(); Authentication.setAuthenticatedUserId("JohnDoe"); try { cmmnTaskService.complete(task.getId()); } finally { Authentication.setAuthenticatedUserId(prevUserId); } assertThat(cmmnRuntimeService.getIdentityLinksForCaseInstance(caseInstance.getId())) .extracting(IdentityLink::getType, IdentityLink::getUserId, IdentityLink::getGroupId) .containsExactlyInAnyOrder( tuple(IdentityLinkType.PARTICIPANT, "JohnDoe", null), tuple(IdentityLinkType.PARTICIPANT, "test", null), tuple(IdentityLinkType.PARTICIPANT, "test2", null) ); } @Test @CmmnDeployment(resources = "org/flowable/cmmn/test/runtime/HumanTaskTest.testHumanTaskCandidatesExpression.cmmn") public void humanTaskWithCollectionExpressionCandidates() { cmmnRuntimeService.createCaseInstanceBuilder() .caseDefinitionKey("myCase") .transientVariable("userVar", Arrays.asList("kermit", "gonzo")) .transientVariable("groupVar", Collections.singletonList("management")) .start(); Task task = cmmnTaskService.createTaskQuery().singleResult(); assertThat(task).isNotNull(); assertThat(cmmnTaskService.getIdentityLinksForTask(task.getId())) .extracting(IdentityLink::getType, IdentityLink::getUserId, IdentityLink::getGroupId) .containsExactlyInAnyOrder( tuple(IdentityLinkType.CANDIDATE, "kermit", null), tuple(IdentityLinkType.CANDIDATE, "gonzo", null), tuple(IdentityLinkType.CANDIDATE, null, "management") ); } @Test @CmmnDeployment(resources = 
"org/flowable/cmmn/test/runtime/HumanTaskTest.testHumanTaskCandidatesExpression.cmmn") public void humanTaskWithArrayNodeExpressionCandidates() { ArrayNode userVar = cmmnEngineConfiguration.getObjectMapper().createArrayNode(); userVar.add("kermit"); ArrayNode groupVar = cmmnEngineConfiguration.getObjectMapper().createArrayNode(); groupVar.add("management").add("sales"); cmmnRuntimeService.createCaseInstanceBuilder() .caseDefinitionKey("myCase") .transientVariable("userVar", userVar) .transientVariable("groupVar", groupVar) .start(); Task task = cmmnTaskService.createTaskQuery().singleResult(); assertThat(task).isNotNull(); assertThat(cmmnTaskService.getIdentityLinksForTask(task.getId())) .extracting(IdentityLink::getType, IdentityLink::getUserId, IdentityLink::getGroupId) .containsExactlyInAnyOrder( tuple(IdentityLinkType.CANDIDATE, "kermit", null), tuple(IdentityLinkType.CANDIDATE, null, "management"), tuple(IdentityLinkType.CANDIDATE, null, "sales") ); } @Test @CmmnDeployment(resources = "org/flowable/cmmn/test/runtime/HumanTaskTest.testHumanTaskIdVariableName.cmmn") public void testHumanTaskIdVariableName() { Authentication.setAuthenticatedUserId("JohnDoe"); CaseInstance caseInstance = cmmnRuntimeService.createCaseInstanceBuilder() .caseDefinitionKey("myCase") .start(); // Normal string Task firstTask = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).taskDefinitionKey("task1").singleResult(); assertThat(firstTask).isNotNull(); String actualTaskId = firstTask.getId(); String myTaskId = (String)cmmnRuntimeService.getVariable(caseInstance.getId(), "myTaskId"); assertThat(myTaskId).isEqualTo(actualTaskId); // Expression Task secondTask = cmmnTaskService.createTaskQuery().caseInstanceId(caseInstance.getId()).taskDefinitionKey("task2").singleResult(); assertThat(secondTask).isNotNull(); actualTaskId = secondTask.getId(); String myExpressionTaskId = (String)cmmnRuntimeService.getVariable(caseInstance.getId(), "myExpressionTaskId"); 
assertThat(myExpressionTaskId).isEqualTo(actualTaskId); } }
apache-2.0
CC4401-TeraCity/TeraCity
engine/src/main/java/org/terasology/entitySystem/prefab/internal/PojoPrefabManager.java
2223
/* * Copyright 2013 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.entitySystem.prefab.internal; import com.google.common.collect.Sets; import org.terasology.asset.AssetManager; import org.terasology.asset.AssetType; import org.terasology.asset.Assets; import org.terasology.registry.CoreRegistry; import org.terasology.entitySystem.Component; import org.terasology.entitySystem.prefab.Prefab; import org.terasology.entitySystem.prefab.PrefabManager; import java.util.Collection; /** * Basic implementation of PrefabManager. * * @author Immortius * @author Rasmus 'Cervator' Praestholm * @see PrefabManager */ public class PojoPrefabManager implements PrefabManager { /** * {@inheritDoc} */ @Override public Prefab getPrefab(String name) { if (!name.isEmpty()) { return Assets.getPrefab(name); } return null; } /** * {@inheritDoc} */ @Override public boolean exists(String name) { return Assets.getPrefab(name) != null; } /** * {@inheritDoc} */ @Override public Iterable<Prefab> listPrefabs() { return CoreRegistry.get(AssetManager.class).listLoadedAssets(AssetType.PREFAB, Prefab.class); } /** * {@inheritDoc} */ @Override public Collection<Prefab> listPrefabs(Class<? extends Component> comp) { Collection<Prefab> prefabs = Sets.newHashSet(); for (Prefab p : CoreRegistry.get(AssetManager.class).listLoadedAssets(AssetType.PREFAB, Prefab.class)) { if (p.getComponent(comp) != null) { prefabs.add(p); } } return prefabs; } }
apache-2.0
sdole/aws-sdk-java
aws-java-sdk-cloudfront/src/main/java/com/amazonaws/services/cloudfront/model/ListDistributionsByWebACLIdRequest.java
11100
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.cloudfront.model;

import java.io.Serializable;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Container for the parameters to the
 * {@link com.amazonaws.services.cloudfront.AmazonCloudFront#listDistributionsByWebACLId(ListDistributionsByWebACLIdRequest) ListDistributionsByWebACLId operation}.
 * <p>
 * List the distributions that are associated with a specified AWS WAF
 * web ACL.
 * </p>
 *
 * @see com.amazonaws.services.cloudfront.AmazonCloudFront#listDistributionsByWebACLId(ListDistributionsByWebACLIdRequest)
 */
public class ListDistributionsByWebACLIdRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * Pagination cursor. Use Marker together with MaxItems to page through the
     * result set: when more than MaxItems distributions match, the response
     * carries a NextMarker element whose value should be supplied here on the
     * next request. Omit for the first request.
     */
    private String marker;

    /**
     * The maximum number of distributions CloudFront returns in the response
     * body. The maximum and default values are both 100.
     */
    private String maxItems;

    /**
     * The Id of the AWS WAF web ACL whose associated distributions should be
     * listed. Specifying the literal string "null" lists the distributions
     * that are NOT associated with any web ACL.
     */
    private String webACLId;

    /**
     * Returns the pagination marker (the NextMarker value from the previous
     * response, or {@code null} for the first page).
     *
     * @return the current pagination marker, possibly {@code null}
     */
    public String getMarker() {
        return marker;
    }

    /**
     * Sets the pagination marker. Supply the NextMarker value from the last
     * response to fetch the next page; omit for the first request.
     *
     * @param marker the pagination marker to use
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Fluent variant of {@link #setMarker(String)}.
     *
     * @param marker the pagination marker to use
     * @return this request, for call chaining
     */
    public ListDistributionsByWebACLIdRequest withMarker(String marker) {
        setMarker(marker);
        return this;
    }

    /**
     * Returns the page-size limit (maximum and default are both 100).
     *
     * @return the maximum number of distributions per response
     */
    public String getMaxItems() {
        return maxItems;
    }

    /**
     * Sets the page-size limit (maximum and default are both 100).
     *
     * @param maxItems the maximum number of distributions per response
     */
    public void setMaxItems(String maxItems) {
        this.maxItems = maxItems;
    }

    /**
     * Fluent variant of {@link #setMaxItems(String)}.
     *
     * @param maxItems the maximum number of distributions per response
     * @return this request, for call chaining
     */
    public ListDistributionsByWebACLIdRequest withMaxItems(String maxItems) {
        setMaxItems(maxItems);
        return this;
    }

    /**
     * Returns the web ACL Id filter ("null" selects distributions without a
     * web ACL).
     *
     * @return the web ACL Id, possibly {@code null}
     */
    public String getWebACLId() {
        return webACLId;
    }

    /**
     * Sets the web ACL Id filter. Pass the literal string "null" to list the
     * distributions that are not associated with any web ACL.
     *
     * @param webACLId the web ACL Id to filter by
     */
    public void setWebACLId(String webACLId) {
        this.webACLId = webACLId;
    }

    /**
     * Fluent variant of {@link #setWebACLId(String)}.
     *
     * @param webACLId the web ACL Id to filter by
     * @return this request, for call chaining
     */
    public ListDistributionsByWebACLIdRequest withWebACLId(String webACLId) {
        setWebACLId(webACLId);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null fields are included, each rendered as
     * "Name: value" and separated by commas.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getMarker() != null) {
            sb.append("Marker: ").append(getMarker()).append(',');
        }
        if (getMaxItems() != null) {
            sb.append("MaxItems: ").append(getMaxItems()).append(',');
        }
        if (getWebACLId() != null) {
            sb.append("WebACLId: ").append(getWebACLId());
        }
        return sb.append('}').toString();
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as a hand-rolled field-by-field hash,
        // expressed as a loop over the three properties.
        int hashCode = 1;
        for (Object property : new Object[] { getMarker(), getMaxItems(), getWebACLId() }) {
            hashCode = 31 * hashCode + (property == null ? 0 : property.hashCode());
        }
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof ListDistributionsByWebACLIdRequest)) {
            return false;
        }
        ListDistributionsByWebACLIdRequest that = (ListDistributionsByWebACLIdRequest) obj;
        return java.util.Objects.equals(getMarker(), that.getMarker())
                && java.util.Objects.equals(getMaxItems(), that.getMaxItems())
                && java.util.Objects.equals(getWebACLId(), that.getWebACLId());
    }

    @Override
    public ListDistributionsByWebACLIdRequest clone() {
        return (ListDistributionsByWebACLIdRequest) super.clone();
    }
}
apache-2.0
mybatis/mybatis-spring-boot
mybatis-spring-boot-samples/mybatis-spring-boot-sample-xml/src/test/java/sample/mybatis/dao/CityDaoTest.java
1563
/** * Copyright 2015-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sample.mybatis.dao; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mybatis.spring.boot.test.autoconfigure.MybatisTest; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Import; import org.springframework.test.context.junit.jupiter.SpringExtension; import sample.mybatis.domain.City; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link CityDao}. * @author wonwoo * @since 1.2.1 */ @ExtendWith(SpringExtension.class) @MybatisTest @Import(CityDao.class) class CityDaoTest { @Autowired private CityDao cityDao; @Test void selectCityByIdTest() { City city = cityDao.selectCityById(1); assertThat(city.getName()).isEqualTo("San Francisco"); assertThat(city.getState()).isEqualTo("CA"); assertThat(city.getCountry()).isEqualTo("US"); } }
apache-2.0
mbrukman/gcloud-java
google-cloud-firestore/src/main/java/com/google/cloud/firestore/DocumentMask.java
2638
/*
 * Copyright 2017 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.firestore;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;

/** A DocumentMask contains the field paths affected by an update. */
final class DocumentMask {

  /** Shared mask containing no field paths at all. */
  static final DocumentMask EMPTY_MASK = new DocumentMask(new TreeSet<FieldPath>());

  private final SortedSet<FieldPath> fieldPaths; // Sorted for testing.

  DocumentMask(Collection<FieldPath> fieldPaths) {
    this(new TreeSet<>(fieldPaths));
  }

  private DocumentMask(SortedSet<FieldPath> fieldPaths) {
    this.fieldPaths = fieldPaths;
  }

  /**
   * Builds the mask for an update map by collecting the field path of every
   * leaf value, recursing into nested maps.
   *
   * @param values the (possibly nested) update map
   * @return a mask covering every affected field path
   */
  static DocumentMask fromObject(Map<String, Object> values) {
    List<FieldPath> fieldPaths = extractFromMap(values, FieldPath.empty());
    return new DocumentMask(fieldPaths);
  }

  /**
   * Recursively collects affected field paths from {@code values}, prefixing
   * each with {@code path}.
   */
  private static List<FieldPath> extractFromMap(Map<String, Object> values, FieldPath path) {
    List<FieldPath> fieldPaths = new ArrayList<>();

    for (Map.Entry<String, Object> entry : values.entrySet()) {
      // Read the value once and use it consistently below (the original mixed
      // `value` with repeated entry.getValue() calls).
      Object value = entry.getValue();
      FieldPath childPath = path.append(FieldPath.of(entry.getKey()));
      if (value == FieldValue.SERVER_TIMESTAMP_SENTINEL) {
        // Intentionally ignored: server-timestamp writes are not part of the mask.
      } else if (value == FieldValue.DELETE_SENTINEL) {
        fieldPaths.add(childPath);
      } else if (value instanceof Map) {
        // Safe by construction: update maps are nested Map<String, Object>.
        @SuppressWarnings("unchecked")
        Map<String, Object> nestedMap = (Map<String, Object>) value;
        fieldPaths.addAll(extractFromMap(nestedMap, childPath));
      } else {
        // We don't need to special case arrays here as we don't support partial array updates.
        fieldPaths.add(childPath);
      }
    }

    return fieldPaths;
  }

  /** Converts this mask into its protobuf (RPC) representation. */
  com.google.firestore.v1beta1.DocumentMask toPb() {
    com.google.firestore.v1beta1.DocumentMask.Builder updateMask =
        com.google.firestore.v1beta1.DocumentMask.newBuilder();

    for (FieldPath fieldPath : fieldPaths) {
      updateMask.addFieldPaths(fieldPath.getEncodedPath());
    }

    return updateMask.build();
  }

  /** Returns true if this mask contains no field paths. */
  boolean isEmpty() {
    return fieldPaths.isEmpty();
  }
}
apache-2.0
raja15792/googleads-java-lib
examples/dfp_axis/src/main/java/dfp/axis/v201505/creativetemplateservice/GetAllCreativeTemplates.java
3338
// Copyright 2015 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package dfp.axis.v201505.creativetemplateservice; import com.google.api.ads.common.lib.auth.OfflineCredentials; import com.google.api.ads.common.lib.auth.OfflineCredentials.Api; import com.google.api.ads.dfp.axis.factory.DfpServices; import com.google.api.ads.dfp.axis.utils.v201505.StatementBuilder; import com.google.api.ads.dfp.axis.v201505.CreativeTemplate; import com.google.api.ads.dfp.axis.v201505.CreativeTemplatePage; import com.google.api.ads.dfp.axis.v201505.CreativeTemplateServiceInterface; import com.google.api.ads.dfp.lib.client.DfpSession; import com.google.api.client.auth.oauth2.Credential; /** * This example gets all creative templates. * * Credentials and properties in {@code fromFile()} are pulled from the * "ads.properties" file. See README for more info. */ public class GetAllCreativeTemplates { public static void runExample(DfpServices dfpServices, DfpSession session) throws Exception { // Get the CreativeTemplateService. CreativeTemplateServiceInterface creativeTemplateService = dfpServices.get(session, CreativeTemplateServiceInterface.class); // Create a statement to get all creative templates. StatementBuilder statementBuilder = new StatementBuilder() .orderBy("id ASC") .limit(StatementBuilder.SUGGESTED_PAGE_LIMIT); // Default for total result set size. int totalResultSetSize = 0; do { // Get creative templates by statement. 
CreativeTemplatePage page = creativeTemplateService.getCreativeTemplatesByStatement(statementBuilder.toStatement()); if (page.getResults() != null) { totalResultSetSize = page.getTotalResultSetSize(); int i = page.getStartIndex(); for (CreativeTemplate creativeTemplate : page.getResults()) { System.out.printf("%d) Creative template with ID \"%d\" and name \"%s\" was found.\n", i++, creativeTemplate.getId(), creativeTemplate.getName()); } } statementBuilder.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT); } while (statementBuilder.getOffset() < totalResultSetSize); System.out.printf("Number of results found: %d\n", totalResultSetSize); } public static void main(String[] args) throws Exception { // Generate a refreshable OAuth2 credential. Credential oAuth2Credential = new OfflineCredentials.Builder() .forApi(Api.DFP) .fromFile() .build() .generateCredential(); // Construct a DfpSession. DfpSession session = new DfpSession.Builder() .fromFile() .withOAuth2Credential(oAuth2Credential) .build(); DfpServices dfpServices = new DfpServices(); runExample(dfpServices, session); } }
apache-2.0
turbine-rpowers/gocd-add-agent-sandbox-config
plugin-infra/go-plugin-infra/test/com/thoughtworks/go/plugin/infra/FelixGoPluginOSGiFrameworkTest.java
25359
/*************************GO-LICENSE-START*********************************
 * Copyright 2014 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.plugin.infra;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Dictionary;
import java.util.List;

import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.plugin.infra.plugininfo.PluginRegistry;
import com.thoughtworks.go.plugin.infra.service.DefaultPluginHealthService;
import com.thoughtworks.go.plugin.infra.service.DefaultPluginLoggingService;
import com.thoughtworks.go.plugin.internal.api.LoggingService;
import com.thoughtworks.go.plugin.internal.api.PluginHealthService;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Matchers;
import org.mockito.Mock;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleException;
import org.osgi.framework.Constants;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.launch.Framework;
import org.osgi.framework.launch.FrameworkFactory;

import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isNull;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;

/**
 * Unit tests for {@code FelixGoPluginOSGiFramework}: service registration at
 * startup, the doOn/doOnAll action-dispatch family, reference lookup, and
 * plugin unloading. All OSGi collaborators (framework, bundle context,
 * bundles, service references) are Mockito mocks; the framework under test is
 * a Mockito spy whose {@code getFelixFramework} is stubbed so no real Felix
 * container is ever started.
 */
public class FelixGoPluginOSGiFrameworkTest {
    // Symbolic name shared by every mocked bundle; the registry maps it to `descriptor`.
    public static final String TEST_SYMBOLIC_NAME = "testplugin.descriptorValidator";
    // Expected descriptor handed to every Action — tests assert identity against this.
    private final GoPluginDescriptor descriptor = buildExpectedDescriptor();
    @Mock private BundleContext bundleContext;
    @Mock private Bundle bundle;
    @Mock private Framework framework;
    @Mock private PluginRegistry registry;
    @Mock private SystemEnvironment systemEnvironment;
    // Spy over the real framework, with Felix creation stubbed out.
    private FelixGoPluginOSGiFramework spy;

    @Before
    public void setUp() throws Exception {
        initMocks(this);
        FelixGoPluginOSGiFramework goPluginOSGiFramwork = new FelixGoPluginOSGiFramework(registry, systemEnvironment);
        spy = spy(goPluginOSGiFramwork);
        when(framework.getBundleContext()).thenReturn(bundleContext);
        when(registry.getPlugin(TEST_SYMBOLIC_NAME)).thenReturn(descriptor);
        // Replace the real Felix framework with our mock so start() never boots OSGi.
        doReturn(framework).when(spy).getFelixFramework(Matchers.<List<FrameworkFactory>>anyObject());
    }

    // start() must register the two internal services plugins rely on (health + logging).
    @Test
    public void shouldRegisterAnInstanceOfEachOfTheRequiredPluginServicesAfterOSGiFrameworkIsInitialized() {
        spy.start();
        verify(bundleContext).registerService(eq(PluginHealthService.class), any(DefaultPluginHealthService.class), isNull(Dictionary.class));
        verify(bundleContext).registerService(eq(LoggingService.class), any(DefaultPluginLoggingService.class), isNull(Dictionary.class));
    }

    // doOnAll runs the action once per registered implementation, passing the owning plugin's descriptor.
    @Test
    public void shouldRunAnActionOnAllRegisteredImplementationsOfAGivenInterface() throws Exception {
        SomeInterface firstService = mock(SomeInterface.class);
        SomeInterface secondService = mock(SomeInterface.class);
        registerServices(firstService, secondService);
        spy.start();
        spy.doOnAll(SomeInterface.class, new Action<SomeInterface>() {
            public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                obj.someMethod();
                assertThat(pluginDescriptor, is(descriptor));
            }
        });
        verify(firstService).someMethod();
        verify(secondService).someMethod();
        verifyNoMoreInteractions(firstService, secondService);
    }

    // Without a handler, doOnAll rethrows on the first failing service and skips the rest.
    @Test
    public void shouldFailWithAnExceptionWhenAnExceptionHandlerIsNotProvided() throws Exception {
        SomeInterface firstService = mock(SomeInterface.class);
        SomeInterface secondService = mock(SomeInterface.class);
        SomeInterface thirdService = mock(SomeInterface.class);
        registerServices(firstService, secondService, thirdService);
        spy.start();
        RuntimeException exceptionToBeThrown = new RuntimeException("Ouch!");
        doThrow(exceptionToBeThrown).when(secondService).someMethod();
        try {
            spy.doOnAll(SomeInterface.class, new Action<SomeInterface>() {
                public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                    obj.someMethod();
                    assertThat(pluginDescriptor, is(descriptor));
                }
            });
        } catch (RuntimeException e) {
            assertThat(e.getMessage(), is("Ouch!"));
            assertThat(e.getCause().getMessage(), is("Ouch!"));
        }
        verify(firstService).someMethod();
        verify(secondService).someMethod();
        // Third service never reached: iteration stopped at the throwing second service.
        verifyZeroInteractions(thirdService);
        verifyNoMoreInteractions(firstService, secondService);
    }

    // With a handler, a failing service is reported to the handler and iteration continues.
    @Test
    public void shouldAllowHandlingExceptionsDuringRunningOfAnActionOnAllRegisteredImplementationsOfAGivenInterface() throws Exception {
        SomeInterface firstService = mock(SomeInterface.class);
        SomeInterface secondService = mock(SomeInterface.class);
        SomeInterface thirdService = mock(SomeInterface.class);
        registerServices(firstService, secondService, thirdService);
        spy.start();
        RuntimeException exceptionToBeThrown = new RuntimeException("Ouch!");
        ExceptionHandler<SomeInterface> exceptionHandler = mock(ExceptionHandler.class);
        doThrow(exceptionToBeThrown).when(secondService).someMethod();
        spy.doOnAllWithExceptionHandling(SomeInterface.class, new Action<SomeInterface>() {
            public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                obj.someMethod();
                assertThat(pluginDescriptor, is(descriptor));
            }
        }, exceptionHandler);
        // Strict ordering: handler is invoked right after the failing service, before the next one.
        InOrder inOrder = inOrder(firstService, secondService, thirdService, exceptionHandler);
        inOrder.verify(firstService).someMethod();
        inOrder.verify(secondService).someMethod();
        inOrder.verify(exceptionHandler).handleException(secondService, exceptionToBeThrown);
        inOrder.verify(thirdService).someMethod();
        verifyNoMoreInteractions(exceptionHandler, firstService, secondService, thirdService);
    }

    // If start() was never called, both doOnAll variants are no-ops.
    @Test
    public void shouldDoNothingWhenTryingToRunOnAllImplementationsIfPluginsAreNotEnabled() throws Exception {
        SomeInterface firstService = mock(SomeInterface.class);
        registerServices(firstService);
        spy.doOnAll(SomeInterface.class, new Action<SomeInterface>() {
            public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                obj.someMethod();
                assertThat(pluginDescriptor, is(descriptor));
            }
        });
        verifyZeroInteractions(firstService);
        ExceptionHandler exceptionHandler = mock(ExceptionHandler.class);
        spy.doOnAllWithExceptionHandling(SomeInterface.class, new Action<SomeInterface>() {
            public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                obj.someMethod();
            }
        }, exceptionHandler);
        verifyZeroInteractions(firstService, exceptionHandler);
    }

    // doOn targets a single plugin by id (here: the mock's toString(), used as the filter value).
    @Test
    public void doOnShouldRunAnActionOnSpecifiedPluginImplementationsOfAGivenInterface() throws Exception {
        SomeInterface firstService = mock(SomeInterface.class);
        SomeInterface secondService = mock(SomeInterface.class);
        registerServices(firstService, secondService);
        spy.start();
        spy.doOn(SomeInterface.class, secondService.toString(), new ActionWithReturn<SomeInterface, Object>() {
            @Override
            public Object execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                assertThat(pluginDescriptor, is(descriptor));
                return obj.someMethodWithReturn();
            }
        });
        spy.doOn(SomeInterface.class, secondService.toString(), new Action<SomeInterface>() {
            @Override
            public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                assertThat(pluginDescriptor, is(descriptor));
                obj.someMethod();
            }
        });
        spy.doOnWithExceptionHandling(SomeInterface.class, secondService.toString(), new Action<SomeInterface>() {
            @Override
            public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                assertThat(pluginDescriptor, is(descriptor));
                obj.someMethod();
            }
        }, new ExceptionHandler<SomeInterface>() {
            @Override
            public void handleException(SomeInterface obj, Throwable t) {
            }
        });
        // Only the targeted (second) service is touched; 2 doOn calls + 1 with handling.
        verify(firstService, never()).someMethodWithReturn();
        verify(secondService).someMethodWithReturn();
        verify(secondService, times(2)).someMethod();
        verifyNoMoreInteractions(firstService, secondService);
    }

    // An exception raised inside the action is delivered to the supplied handler, not rethrown.
    @Test
    public void doOnExceptionHandlingShouldRunAnActionOnSpecifiedPluginImplementationsOfAGivenInterfaceAndDelegateTheExceptionToTheHandler() throws Exception {
        SomeInterface firstService = mock(SomeInterface.class);
        SomeInterface secondService = mock(SomeInterface.class);
        registerServices(firstService, secondService);
        spy.start();
        final RuntimeException expectedException = new RuntimeException("Exception Thrown By Spy Method");
        spy.doOnWithExceptionHandling(SomeInterface.class, secondService.toString(), new Action<SomeInterface>() {
            @Override
            public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                assertThat(pluginDescriptor, is(descriptor));
                obj.someMethod();
                throw expectedException;
            }
        }, new ExceptionHandler<SomeInterface>() {
            @Override
            public void handleException(SomeInterface obj, Throwable t) {
                assertThat((RuntimeException) t, is(expectedException));
            }
        });
        verify(firstService, never()).someMethodWithReturn();
        verify(secondService, never()).someMethodWithReturn();
        verify(secondService).someMethod();
        verifyNoMoreInteractions(firstService, secondService);
    }

    // Ambiguous lookup: two services registered under one symbolic name must make all doOn variants fail fast.
    @Test
    public void doOnShouldThrowAnExceptionWhenThereAreMultipleServicesWithSamePluginId_IdeallyThisShouldNotHappenInProductionSincePluginIdIsSymbolicName() throws Exception {
        SomeInterface firstService = mock(SomeInterface.class);
        SomeInterface secondService = mock(SomeInterface.class);
        String symbolicName = "same_symbolic_name";
        registerServicesWithSameSymbolicName(symbolicName, firstService, secondService);
        spy.start();
        try {
            spy.doOn(SomeInterface.class, symbolicName, new ActionWithReturn<SomeInterface, Object>() {
                @Override
                public Object execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                    assertThat(pluginDescriptor, is(descriptor));
                    return obj.someMethodWithReturn();
                }
            });
            fail("Should throw plugin framework exception");
        } catch (GoPluginFrameworkException ex) {
            assertThat(ex.getMessage().startsWith("More than one reference found"), is(true));
            assertThat(ex.getMessage().contains(SomeInterface.class.getCanonicalName()), is(true));
            assertThat(ex.getMessage().contains(symbolicName), is(true));
        }
        try {
            spy.doOn(SomeInterface.class, symbolicName, new Action<SomeInterface>() {
                @Override
                public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                    assertThat(pluginDescriptor, is(descriptor));
                    obj.someMethod();
                }
            });
            fail("Should throw plugin framework exception");
        } catch (GoPluginFrameworkException ex) {
            assertThat(ex.getMessage().startsWith("More than one reference found"), is(true));
            assertThat(ex.getMessage().contains(SomeInterface.class.getCanonicalName()), is(true));
            assertThat(ex.getMessage().contains(symbolicName), is(true));
        }
        try {
            spy.doOnWithExceptionHandling(SomeInterface.class, symbolicName, new Action<SomeInterface>() {
                @Override
                public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                    assertThat(pluginDescriptor, is(descriptor));
                    obj.someMethod();
                }
            }, new ExceptionHandler<SomeInterface>() {
                @Override
                public void handleException(SomeInterface obj, Throwable t) {
                }
            });
            fail("Should throw plugin framework exception");
        } catch (GoPluginFrameworkException ex) {
            assertThat(ex.getMessage().startsWith("More than one reference found"), is(true));
            assertThat(ex.getMessage().contains(SomeInterface.class.getCanonicalName()), is(true));
            assertThat(ex.getMessage().contains(symbolicName), is(true));
        }
        // None of the services should have been invoked when the lookup is ambiguous.
        verify(firstService, never()).someMethodWithReturn();
        verify(secondService, never()).someMethodWithReturn();
        verify(secondService, never()).someMethod();
        verifyNoMoreInteractions(firstService, secondService);
    }

    // Missing lookup: asking for an interface nothing implements must fail fast in all doOn variants.
    @Test
    public void doOnShouldThrowAnExceptionWhenThereAreNoServicesAreFoundForTheGivenFilterAndServiceReference() throws Exception {
        SomeInterface firstService = mock(SomeInterface.class);
        SomeInterface secondService = mock(SomeInterface.class);
        String symbolicName = "dummy_symbolic_name";
        registerServicesWithSameSymbolicName(symbolicName, firstService, secondService);
        spy.start();
        try {
            spy.doOn(SomeOtherInterface.class, symbolicName, new ActionWithReturn<SomeOtherInterface, Object>() {
                @Override
                public Object execute(SomeOtherInterface obj, GoPluginDescriptor pluginDescriptor) {
                    assertThat(pluginDescriptor, is(descriptor));
                    throw new RuntimeException("Should Not Be invoked");
                }
            });
            fail("Should throw plugin framework exception");
        } catch (GoPluginFrameworkException ex) {
            assertThat(ex.getMessage().startsWith("No reference found"), is(true));
            assertThat(ex.getMessage().contains(SomeOtherInterface.class.getCanonicalName()), is(true));
            assertThat(ex.getMessage().contains(symbolicName), is(true));
        }
        try {
            spy.doOn(SomeOtherInterface.class, symbolicName, new Action<SomeOtherInterface>() {
                @Override
                public void execute(SomeOtherInterface obj, GoPluginDescriptor pluginDescriptor) {
                    assertThat(pluginDescriptor, is(descriptor));
                    throw new RuntimeException("Should Not Be invoked");
                }
            });
            fail("Should throw plugin framework exception");
        } catch (GoPluginFrameworkException ex) {
            assertThat(ex.getMessage().startsWith("No reference found"), is(true));
            assertThat(ex.getMessage().contains(SomeOtherInterface.class.getCanonicalName()), is(true));
            assertThat(ex.getMessage().contains(symbolicName), is(true));
        }
        try {
            spy.doOnWithExceptionHandling(SomeOtherInterface.class, symbolicName, new Action<SomeOtherInterface>() {
                @Override
                public void execute(SomeOtherInterface obj, GoPluginDescriptor pluginDescriptor) {
                    assertThat(pluginDescriptor, is(descriptor));
                    throw new RuntimeException("Should Not Be invoked");
                }
            }, new ExceptionHandler<SomeOtherInterface>() {
                @Override
                public void handleException(SomeOtherInterface obj, Throwable t) {
                }
            });
            fail("Should throw plugin framework exception");
        } catch (GoPluginFrameworkException ex) {
            assertThat(ex.getMessage().startsWith("No reference found"), is(true));
            assertThat(ex.getMessage().contains(SomeOtherInterface.class.getCanonicalName()), is(true));
            assertThat(ex.getMessage().contains(symbolicName), is(true));
        }
        verify(firstService, never()).someMethodWithReturn();
        verify(secondService, never()).someMethodWithReturn();
        verify(secondService, never()).someMethod();
        verifyNoMoreInteractions(firstService, secondService);
    }

    // doOnAllForPlugin hits every extension registered under one plugin's symbolic name.
    @Test
    public void doOnAllShouldRunAnActionOnAllPluginExtensionsOfAGivenPluginJar() throws Exception {
        SomeInterface firstService = mock(SomeInterface.class);
        SomeInterface secondService = mock(SomeInterface.class);
        String symbolicName = "same_symbolic_name";
        registerServicesWithSameSymbolicName(symbolicName, firstService, secondService);
        spy.start();
        spy.doOnAllForPlugin(SomeInterface.class, symbolicName, new Action<SomeInterface>() {
            @Override
            public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                assertThat(pluginDescriptor, is(descriptor));
                obj.someMethod();
            }
        });
        verify(secondService).someMethod();
        verify(firstService).someMethod();
        verifyNoMoreInteractions(firstService, secondService);
    }

    // Same as above but a throwing action must not stop iteration when a handler is supplied.
    @Test
    public void doOnAllWithExceptionHandlingShouldRunAnActionOnAllPluginExtensionsOfAGivenPluginJar() throws Exception {
        SomeInterface firstService = mock(SomeInterface.class);
        SomeInterface secondService = mock(SomeInterface.class);
        String symbolicName = "same_symbolic_name";
        registerServicesWithSameSymbolicName(symbolicName, firstService, secondService);
        spy.start();
        spy.doOnAllWithExceptionHandlingForPlugin(SomeInterface.class, symbolicName, new Action<SomeInterface>() {
            @Override
            public void execute(SomeInterface obj, GoPluginDescriptor pluginDescriptor) {
                assertThat(pluginDescriptor, is(descriptor));
                obj.someMethod();
                throw new RuntimeException("Dummy Exception");
            }
        }, new ExceptionHandler<SomeInterface>() {
            @Override
            public void handleException(SomeInterface obj, Throwable t) {
            }
        });
        verify(secondService).someMethod();
        verify(firstService).someMethod();
        verifyNoMoreInteractions(firstService, secondService);
    }

    /**
     * Registers each given service under ONE shared symbolic name, so a
     * filter query for that name returns multiple references. Each service
     * gets its own mocked bundle (all reporting TEST_SYMBOLIC_NAME so the
     * registry resolves them to `descriptor`).
     */
    private void registerServicesWithSameSymbolicName(String symbolicName, SomeInterface... someInterfaces) throws InvalidSyntaxException {
        ArrayList<ServiceReference<SomeInterface>> references = new ArrayList<ServiceReference<SomeInterface>>();
        for (int i = 0; i < someInterfaces.length; ++i) {
            ServiceReference reference = mock(ServiceReference.class);
            Bundle bundle = mock(Bundle.class);
            when(reference.getBundle()).thenReturn(bundle);
            when(bundle.getSymbolicName()).thenReturn(TEST_SYMBOLIC_NAME);
            when(bundleContext.getService(reference)).thenReturn(someInterfaces[i]);
            references.add(reference);
        }
        String propertyFormat = String.format("(%s=%s)", Constants.BUNDLE_SYMBOLICNAME, symbolicName);
        when(bundleContext.getServiceReferences(SomeInterface.class, propertyFormat)).thenReturn(references);
    }

    // hasReferenceFor: false before the service is registered, true after.
    @Test
    public void HasReferencesShouldReturnAppropriateValueIfSpecifiedPluginImplementationsOfAGivenInterfaceIsFoundOrNotFound() throws Exception {
        SomeInterface firstService = mock(SomeInterface.class);
        SomeInterface secondService = mock(SomeInterface.class);
        spy.start();
        boolean reference = spy.hasReferenceFor(SomeInterface.class, secondService.toString());
        assertThat(reference, is(false));
        registerServices(firstService, secondService);
        reference = spy.hasReferenceFor(SomeInterface.class, secondService.toString());
        assertThat(reference, is(true));
        verifyNoMoreInteractions(firstService, secondService);
    }

    // Unloading a valid plugin stops and uninstalls its bundle.
    @Test
    public void shouldUnloadAPlugin() throws BundleException {
        GoPluginDescriptor pluginDescriptor = mock(GoPluginDescriptor.class);
        Bundle bundle = mock(Bundle.class);
        when(pluginDescriptor.bundle()).thenReturn(bundle);
        spy.unloadPlugin(pluginDescriptor);
        verify(bundle, atLeastOnce()).stop();
        verify(bundle, atLeastOnce()).uninstall();
    }

    // Even an invalid plugin with a bundle still gets its bundle stopped/uninstalled.
    @Test
    public void shouldUnloadAnInvalidPlugin() throws BundleException {
        GoPluginDescriptor pluginDescriptor = mock(GoPluginDescriptor.class);
        Bundle bundle = mock(Bundle.class);
        when(pluginDescriptor.bundle()).thenReturn(bundle);
        when(pluginDescriptor.isInvalid()).thenReturn(true);
        spy.unloadPlugin(pluginDescriptor);
        verify(bundle, atLeastOnce()).stop();
        verify(bundle, atLeastOnce()).uninstall();
    }

    // A descriptor with no bundle (never loaded) must unload without touching OSGi.
    @Test
    public void shouldNotUnloadBundleForAnUnloadedInvalidPlugin() throws BundleException {
        GoPluginDescriptor pluginDescriptor = mock(GoPluginDescriptor.class);
        when(pluginDescriptor.bundle()).thenReturn(null);
        spy.unloadPlugin(pluginDescriptor);
    }

    /**
     * Registers each service on the SHARED mocked `bundle`, answering both
     * the unfiltered getServiceReferences(SomeInterface, null) query and the
     * per-service filter query (via the helper below, keyed on toString()).
     */
    private void registerServices(SomeInterface... someInterfaces) throws InvalidSyntaxException {
        ArrayList<ServiceReference<SomeInterface>> references = new ArrayList<ServiceReference<SomeInterface>>();
        for (int i = 0; i < someInterfaces.length; ++i) {
            ServiceReference reference = mock(ServiceReference.class);
            when(reference.getBundle()).thenReturn(bundle);
            when(bundle.getSymbolicName()).thenReturn(TEST_SYMBOLIC_NAME);
            when(bundleContext.getService(reference)).thenReturn(someInterfaces[i]);
            setExpectationForFilterBasedServiceReferenceCall(someInterfaces[i], reference);
            references.add(reference);
        }
        when(bundleContext.getServiceReferences(SomeInterface.class, null)).thenReturn(references);
    }

    // Stubs the symbolic-name filter query for a single service (filter value = service.toString()).
    private void setExpectationForFilterBasedServiceReferenceCall(SomeInterface service, ServiceReference reference) throws InvalidSyntaxException {
        ArrayList<ServiceReference<SomeInterface>> references = new ArrayList<ServiceReference<SomeInterface>>();
        String propertyFormat = String.format("(%s=%s)", Constants.BUNDLE_SYMBOLICNAME, service.toString());
        references.add(reference);
        when(bundleContext.getServiceReferences(SomeInterface.class, propertyFormat)).thenReturn(references);
    }

    // The one descriptor the mocked registry returns for TEST_SYMBOLIC_NAME.
    private GoPluginDescriptor buildExpectedDescriptor() {
        return new GoPluginDescriptor(TEST_SYMBOLIC_NAME, "1",
                new GoPluginDescriptor.About("Plugin Descriptor Validator", "1.0.1", "12.4", "Validates its own plugin descriptor",
                        new GoPluginDescriptor.Vendor("ThoughtWorks Go Team", "www.thoughtworks.com"),
                        Arrays.asList("Linux", "Windows")), null, null, true);
    }

    // Minimal service contract used by the dispatch tests.
    private interface SomeInterface {
        void someMethod();

        Object someMethodWithReturn();
    }

    // Deliberately unimplemented interface for the "no reference found" tests.
    private interface SomeOtherInterface {
    }
}
apache-2.0
trivium-io/trivium-core
src/io/trivium/dep/org/iq80/leveldb/util/FileUtils.java
5903
/*
 * Copyright (C) 2011 the original author or authors.
 * See the notice.md file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trivium.dep.org.iq80.leveldb.util;

import io.trivium.dep.com.google.common.base.Preconditions;
import io.trivium.dep.com.google.common.collect.ImmutableList;
import io.trivium.dep.com.google.common.io.Files;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;

/**
 * Static file-system helpers: symlink detection, null-safe directory listing,
 * temp-dir creation, and recursive copy/delete. Stateless utility class.
 */
public final class FileUtils
{
    // Upper bound on name collisions tolerated when creating a temp dir.
    private static final int TEMP_DIR_ATTEMPTS = 10000;

    private FileUtils()
    {
    }

    /**
     * Heuristically detects a symbolic link by comparing the canonical path
     * (links resolved) with the absolute path (links preserved).
     * On IOException the method errs on the side of caution and reports true,
     * so callers treat unreadable paths as links and leave them alone.
     */
    public static boolean isSymbolicLink(File file)
    {
        try {
            File canonicalFile = file.getCanonicalFile();
            File absoluteFile = file.getAbsoluteFile();
            File parentFile = file.getParentFile();
            // a symbolic link has a different name between the canonical and absolute path
            return !canonicalFile.getName().equals(absoluteFile.getName()) ||
                    // or the canonical parent path is not the same as the file's parent path,
                    // provided the file has a parent path
                    parentFile != null && !parentFile.getCanonicalPath().equals(canonicalFile.getParent());
        }
        catch (IOException e) {
            // error on the side of caution
            return true;
        }
    }

    /**
     * Lists the entries of {@code dir}; returns an empty list (never null)
     * when {@code dir} is not a directory or cannot be read.
     */
    public static ImmutableList<File> listFiles(File dir)
    {
        File[] files = dir.listFiles();
        if (files == null) {
            return ImmutableList.of();
        }
        return ImmutableList.copyOf(files);
    }

    /**
     * Lists the entries of {@code dir} matching {@code filter}; returns an
     * empty list (never null) on unreadable/non-directory input.
     */
    public static ImmutableList<File> listFiles(File dir, FilenameFilter filter)
    {
        File[] files = dir.listFiles(filter);
        if (files == null) {
            return ImmutableList.of();
        }
        return ImmutableList.copyOf(files);
    }

    /** Creates a fresh temp directory under the system temp dir. */
    public static File createTempDir(String prefix)
    {
        return createTempDir(new File(System.getProperty("java.io.tmpdir")), prefix);
    }

    /**
     * Creates a fresh directory named {@code <prefix>-<millis>-<counter>}
     * under {@code parentDir}, retrying the counter on collision.
     *
     * @throws IllegalStateException after TEMP_DIR_ATTEMPTS failed attempts
     */
    public static File createTempDir(File parentDir, String prefix)
    {
        String baseName = "";
        if (prefix != null) {
            baseName += prefix + "-";
        }
        baseName += System.currentTimeMillis() + "-";

        for (int counter = 0; counter < TEMP_DIR_ATTEMPTS; counter++) {
            File tempDir = new File(parentDir, baseName + counter);
            // mkdir is atomic: success means we own a brand-new directory
            if (tempDir.mkdir()) {
                return tempDir;
            }
        }
        throw new IllegalStateException("Failed to create directory within "
                + TEMP_DIR_ATTEMPTS + " attempts (tried "
                + baseName + "0 to " + baseName + (TEMP_DIR_ATTEMPTS - 1) + ')');
    }

    /**
     * Deletes everything inside {@code directory} (not the directory itself).
     * Symlinked directories are skipped so we never follow a link out of the
     * tree and delete foreign files.
     *
     * @return true if every entry was deleted
     */
    public static boolean deleteDirectoryContents(File directory)
    {
        Preconditions.checkArgument(directory.isDirectory(), "Not a directory: %s", directory);

        // Don't delete symbolic link directories
        if (isSymbolicLink(directory)) {
            return false;
        }

        boolean success = true;
        for (File file : listFiles(directory)) {
            success = deleteRecursively(file) && success;
        }
        return success;
    }

    /**
     * Deletes {@code file}, recursing first when it is a directory.
     *
     * @return true if the file and all its contents were deleted
     */
    public static boolean deleteRecursively(File file)
    {
        boolean success = true;
        if (file.isDirectory()) {
            success = deleteDirectoryContents(file);
        }
        return file.delete() && success;
    }

    /**
     * Recursively copies the contents of {@code src} into {@code target},
     * creating {@code target} if needed. Symlinked source directories are not
     * followed.
     *
     * @return true if every entry was copied
     */
    public static boolean copyDirectoryContents(File src, File target)
    {
        Preconditions.checkArgument(src.isDirectory(), "Source dir is not a directory: %s", src);

        // Don't copy symbolic link directories
        if (isSymbolicLink(src)) {
            return false;
        }

        target.mkdirs();
        // BUG FIX: the failure message previously interpolated `src`, hiding
        // which path actually failed; report `target` instead.
        Preconditions.checkArgument(target.isDirectory(), "Target dir is not a directory: %s", target);

        boolean success = true;
        for (File file : listFiles(src)) {
            success = copyRecursively(file, new File(target, file.getName())) && success;
        }
        return success;
    }

    /**
     * Copies a file, or recursively copies a directory tree.
     *
     * @return true on full success; I/O failures are reported as false
     */
    public static boolean copyRecursively(File src, File target)
    {
        if (src.isDirectory()) {
            return copyDirectoryContents(src, target);
        }
        else {
            try {
                Files.copy(src, target);
                return true;
            }
            catch (IOException e) {
                return false;
            }
        }
    }

    /** Builds {@code parent/paths[0]/paths[1]/...} from a String parent. */
    public static File newFile(String parent, String... paths)
    {
        Preconditions.checkNotNull(parent, "parent is null");
        Preconditions.checkNotNull(paths, "paths is null");

        return newFile(new File(parent), ImmutableList.copyOf(paths));
    }

    /** Builds {@code parent/paths[0]/paths[1]/...} from a File parent. */
    public static File newFile(File parent, String... paths)
    {
        Preconditions.checkNotNull(parent, "parent is null");
        Preconditions.checkNotNull(paths, "paths is null");

        return newFile(parent, ImmutableList.copyOf(paths));
    }

    /** Appends each path segment in order under {@code parent}. */
    public static File newFile(File parent, Iterable<String> paths)
    {
        Preconditions.checkNotNull(parent, "parent is null");
        Preconditions.checkNotNull(paths, "paths is null");

        File result = parent;
        for (String path : paths) {
            result = new File(result, path);
        }
        return result;
    }
}
apache-2.0
R0g3r10LL31t3/AccountManager-ROLLSoftware
src/java/com/rollsoftware/br/common/concurrent/PriorityThreadPoolExecutor.java
5318
/*
 * Copyright 2016-2026 Rogério Lecarião Leite
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * CEO 2016: Rogério Lecarião Leite; ROLL Software
 */
package com.rollsoftware.br.common.concurrent;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.FutureTask;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.RunnableFuture;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * A {@link ThreadPoolExecutor} whose submitted tasks carry an integer
 * priority (lower value = dequeued first). Priorities only take effect when
 * the executor is built with a comparing work queue such as
 * {@code PriorityBlockingQueue} — NOTE(review): the constructors do not
 * enforce this; with a FIFO queue tasks run in submission order.
 *
 * <p>Tasks wrapped in {@link PriorityWorker} keep their declared priority;
 * any other {@code Callable}/{@code Runnable} gets priority 0 (highest).
 *
 * @author Rogério
 * @date December, 2016
 */
public class PriorityThreadPoolExecutor extends ThreadPoolExecutor {

    public PriorityThreadPoolExecutor(
            int corePoolSize, int maximumPoolSize,
            long keepAliveTime, TimeUnit unit,
            BlockingQueue<Runnable> workQueue) {
        super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue);
    }

    public PriorityThreadPoolExecutor(
            int corePoolSize, int maximumPoolSize,
            long keepAliveTime, TimeUnit unit,
            BlockingQueue<Runnable> workQueue,
            ThreadFactory threadFactory) {
        super(corePoolSize, maximumPoolSize, keepAliveTime, unit,
                workQueue, threadFactory);
    }

    public PriorityThreadPoolExecutor(
            int corePoolSize, int maximumPoolSize,
            long keepAliveTime, TimeUnit unit,
            BlockingQueue<Runnable> workQueue,
            RejectedExecutionHandler handler) {
        super(corePoolSize, maximumPoolSize, keepAliveTime, unit,
                workQueue, handler);
    }

    public PriorityThreadPoolExecutor(
            int corePoolSize, int maximumPoolSize,
            long keepAliveTime, TimeUnit unit,
            BlockingQueue<Runnable> workQueue,
            ThreadFactory threadFactory,
            RejectedExecutionHandler handler) {
        super(corePoolSize, maximumPoolSize, keepAliveTime, unit,
                workQueue, threadFactory, handler);
    }

    /**
     * Wraps the callable in a comparable future so a priority queue can order
     * it; the priority is taken from {@link PriorityWorker}, else 0.
     */
    @Override
    protected <T> RunnableFuture<T> newTaskFor(Callable<T> callable) {
        int priority = 0;
        if (callable instanceof PriorityWorker) {
            priority = ((PriorityWorker<?>) callable).getPriority();
        }
        return new FutureTaskWithPriority<>(priority, callable);
    }

    /**
     * Runnable submissions always get priority 0 — a plain Runnable carries
     * no priority information ({@link PriorityWorker} is a Callable).
     */
    @Override
    protected <T> RunnableFuture<T> newTaskFor(Runnable runnable, T value) {
        return new FutureTaskWithPriority<>(0, runnable, value);
    }

    /**
     * FutureTask ordered by its priority value (ascending, so lower values
     * are taken first by a min-heap priority queue). Was previously written
     * with raw generics; now fully parameterized — behavior unchanged.
     */
    private static class FutureTaskWithPriority<T>
            extends FutureTask<T> implements Comparable<Object> {

        private final int priority;

        public FutureTaskWithPriority(
                int priority, Runnable runnable, T result) {
            super(runnable, result);
            this.priority = priority;
        }

        public FutureTaskWithPriority(int priority, Callable<T> callable) {
            super(callable);
            this.priority = priority;
        }

        public int getPriority() {
            return priority;
        }

        @Override
        public int compareTo(Object o) {
            // Preserves the original (non-standard) contract: null sorts
            // after this task, and unrelated types compare as equal. Only
            // FutureTaskWithPriority instances ever reach the work queue.
            if (o == null) {
                return -1;
            } else if (o instanceof FutureTaskWithPriority) {
                FutureTaskWithPriority<?> other = (FutureTaskWithPriority<?>) o;
                // Integer.compare avoids the hand-rolled ternary chain.
                return Integer.compare(this.getPriority(), other.getPriority());
            }
            return 0;
        }
    }

    /**
     * Adapter attaching a priority to a {@code Callable} or {@code Runnable}.
     * Runnable-backed workers return null from {@link #call()}.
     */
    public static class PriorityWorker<T> implements Callable<T> {

        private final int priority;
        // Union of Callable<T> / Runnable, discriminated in call().
        private final Object worker;

        public PriorityWorker(int priority, Callable<T> callable) {
            this.priority = priority;
            worker = callable;
        }

        public PriorityWorker(int priority, Runnable callable) {
            this.priority = priority;
            worker = callable;
        }

        public int getPriority() {
            return priority;
        }

        @Override
        @SuppressWarnings("unchecked") // worker is only ever set from Callable<T> or Runnable
        public T call() throws Exception {
            if (worker instanceof Callable) {
                return ((Callable<T>) worker).call();
            } else if (worker instanceof Runnable) {
                ((Runnable) worker).run();
            }
            return null;
        }
    }

    /** Named priority levels; lower value = higher scheduling priority. */
    public static enum Priority {
        HIGHEST(0), HIGH(1), MEDIUM(2), LOW(3), LOWEST(4);

        private final int value;

        private Priority(int value) {
            this.value = value;
        }

        public int getValue() {
            return value;
        }
    }
}
apache-2.0
jwren/intellij-community
python/python-psi-impl/src/com/jetbrains/python/refactoring/inline/PyInlineLocalHandler.java
14259
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.python.refactoring.inline; import com.intellij.codeInsight.TargetElementUtilBase; import com.intellij.codeInsight.controlflow.Instruction; import com.intellij.codeInsight.highlighting.HighlightManager; import com.intellij.lang.Language; import com.intellij.lang.refactoring.InlineActionHandler; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.colors.EditorColors; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.wm.WindowManager; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleManager; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.refactoring.RefactoringBundle; import com.intellij.refactoring.RefactoringUiService; import com.intellij.refactoring.listeners.RefactoringEventData; import com.intellij.refactoring.listeners.RefactoringEventListener; import com.intellij.refactoring.util.CommonRefactoringUtil; import com.intellij.util.Query; import com.intellij.util.containers.ContainerUtil; import com.jetbrains.python.PyPsiBundle; import com.jetbrains.python.PyTokenTypes; import com.jetbrains.python.PythonLanguage; import com.jetbrains.python.codeInsight.controlflow.ScopeOwner; import com.jetbrains.python.psi.*; import com.jetbrains.python.psi.impl.PyPsiUtils; import com.jetbrains.python.refactoring.PyDefUseUtil; import com.jetbrains.python.refactoring.PyReplaceExpressionUtil; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import 
org.jetbrains.annotations.Nullable; import java.util.ArrayList; import java.util.List; /** * @author Dennis.Ushakov */ public class PyInlineLocalHandler extends InlineActionHandler { private static final Logger LOG = Logger.getInstance(PyInlineLocalHandler.class.getName()); private static final Pair<PyStatement, Boolean> EMPTY_DEF_RESULT = Pair.create(null, false); private static final String HELP_ID = "refactoring.inlineVariable"; public static PyInlineLocalHandler getInstance() { return InlineActionHandler.EP_NAME.findExtensionOrFail(PyInlineLocalHandler.class); } @Override public boolean isEnabledForLanguage(Language l) { return l instanceof PythonLanguage; } @Override public boolean canInlineElement(PsiElement element) { return element instanceof PyTargetExpression; } @Override public void inlineElement(Project project, Editor editor, PsiElement element) { if (editor == null) { return; } final PsiReference psiReference = TargetElementUtilBase.findReferenceWithoutExpectedCaret(editor); PyReferenceExpression refExpr = null; if (psiReference != null) { final PsiElement refElement = psiReference.getElement(); if (refElement instanceof PyReferenceExpression) { refExpr = (PyReferenceExpression)refElement; } } invoke(project, editor, (PyTargetExpression)element, refExpr); } private static void invoke(@NotNull final Project project, @NotNull final Editor editor, @NotNull final PyTargetExpression local, @Nullable PyReferenceExpression refExpr) { if (!CommonRefactoringUtil.checkReadOnlyStatus(project, local)) return; final HighlightManager highlightManager = HighlightManager.getInstance(project); final String localName = local.getName(); final ScopeOwner containerBlock = getContext(local); LOG.assertTrue(containerBlock != null); final Pair<PyStatement, Boolean> defPair = getAssignmentToInline(containerBlock, refExpr, local, project); final PyStatement def = defPair.first; if (def == null || getValue(def) == null) { final String key = defPair.second ? 
"variable.has.no.dominating.definition" : "variable.has.no.initializer"; final String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message(key, localName)); CommonRefactoringUtil.showErrorHint(project, editor, message, getRefactoringName(), HELP_ID); return; } if (def instanceof PyAssignmentStatement && ((PyAssignmentStatement)def).getTargets().length > 1) { highlightManager.addOccurrenceHighlights(editor, new PsiElement[]{def}, EditorColors.WRITE_SEARCH_RESULT_ATTRIBUTES, true, null); final String message = RefactoringBundle.getCannotRefactorMessage(PyPsiBundle.message("refactoring.inline.local.multiassignment", localName)); CommonRefactoringUtil.showErrorHint(project, editor, message, getRefactoringName(), HELP_ID); return; } final PsiElement[] refsToInline = PyDefUseUtil.getPostRefs(containerBlock, local, getObject(def)); if (refsToInline.length == 0) { final String message = RefactoringBundle.message("variable.is.never.used", localName); CommonRefactoringUtil.showErrorHint(project, editor, message, getRefactoringName(), HELP_ID); return; } if (!ApplicationManager.getApplication().isUnitTestMode()) { highlightManager.addOccurrenceHighlights(editor, refsToInline, EditorColors.SEARCH_RESULT_ATTRIBUTES, true, null); final int occurrencesCount = refsToInline.length; final String occurrencesString = RefactoringBundle.message("occurrences.string", occurrencesCount); final String question = RefactoringBundle.message("inline.local.variable.prompt", localName) + " " + occurrencesString; boolean result = RefactoringUiService.getInstance().showRefactoringMessageDialog(getRefactoringName(), question, HELP_ID, "OptionPane.questionIcon", true, project); if (!result) { WindowManager.getInstance().getStatusBar(project).setInfo(RefactoringBundle.message("press.escape.to.remove.the.highlighting")); return; } } final PsiFile workingFile = local.getContainingFile(); for (PsiElement ref : refsToInline) { final PsiFile otherFile = ref.getContainingFile(); if 
(!otherFile.equals(workingFile)) { final String message = RefactoringBundle.message("variable.is.referenced.in.multiple.files", localName); CommonRefactoringUtil.showErrorHint(project, editor, message, getRefactoringName(), HELP_ID); return; } } for (final PsiElement ref : refsToInline) { final List<PsiElement> elems = new ArrayList<>(); final List<Instruction> latestDefs = PyDefUseUtil.getLatestDefs(containerBlock, local.getName(), ref, false, false); for (Instruction i : latestDefs) { elems.add(i.getElement()); } final PsiElement[] defs = elems.toArray(PsiElement.EMPTY_ARRAY); boolean isSameDefinition = true; for (PsiElement otherDef : defs) { isSameDefinition &= isSameDefinition(def, otherDef); } if (!isSameDefinition) { highlightManager.addOccurrenceHighlights(editor, defs, EditorColors.WRITE_SEARCH_RESULT_ATTRIBUTES, true, null); highlightManager.addOccurrenceHighlights(editor, new PsiElement[]{ref}, EditorColors.SEARCH_RESULT_ATTRIBUTES, true, null); final String message = RefactoringBundle.getCannotRefactorMessage( RefactoringBundle.message("variable.is.accessed.for.writing.and.used.with.inlined", localName)); CommonRefactoringUtil.showErrorHint(project, editor, message, getRefactoringName(), HELP_ID); WindowManager.getInstance().getStatusBar(project).setInfo(RefactoringBundle.message("press.escape.to.remove.the.highlighting")); return; } } CommandProcessor.getInstance().executeCommand(project, () -> ApplicationManager.getApplication().runWriteAction(() -> { try { final RefactoringEventData afterData = new RefactoringEventData(); afterData.addElement(local); project.getMessageBus().syncPublisher(RefactoringEventListener.REFACTORING_EVENT_TOPIC) .refactoringStarted(getRefactoringId(), afterData); final PsiElement[] exprs = new PsiElement[refsToInline.length]; final PyExpression value = prepareValue(def, localName, project); final LanguageLevel level = LanguageLevel.forElement(value); final PyExpression withParenthesis = 
PyElementGenerator.getInstance(project).createExpressionFromText(level, "(" + value.getText() + ")"); final PsiElement lastChild = def.getLastChild(); if (lastChild != null && lastChild.getNode().getElementType() == PyTokenTypes.END_OF_LINE_COMMENT) { final PsiElement parent = def.getParent(); if (parent != null) parent.addBefore(lastChild, def); } for (int i = 0, refsToInlineLength = refsToInline.length; i < refsToInlineLength; i++) { final PsiElement element = refsToInline[i]; if (PyReplaceExpressionUtil.isNeedParenthesis((PyExpression)element, value)) { exprs[i] = element.replace(withParenthesis); } else { exprs[i] = element.replace(value); } } final PsiElement next = def.getNextSibling(); if (next instanceof PsiWhiteSpace) { PyPsiUtils.removeElements(next); } PyPsiUtils.removeElements(def); final List<TextRange> ranges = ContainerUtil.mapNotNull(exprs, element -> { final PyStatement parentalStatement = PsiTreeUtil.getParentOfType(element, PyStatement.class, false); return parentalStatement != null ? 
parentalStatement.getTextRange() : null; }); PsiDocumentManager.getInstance(project).commitDocument(editor.getDocument()); CodeStyleManager.getInstance(project).reformatText(workingFile, ranges); if (!ApplicationManager.getApplication().isUnitTestMode()) { highlightManager.addOccurrenceHighlights(editor, exprs, EditorColors.SEARCH_RESULT_ATTRIBUTES, true, null); WindowManager.getInstance().getStatusBar(project) .setInfo(RefactoringBundle.message("press.escape.to.remove.the.highlighting")); } } finally { final RefactoringEventData afterData = new RefactoringEventData(); afterData.addElement(local); project.getMessageBus().syncPublisher(RefactoringEventListener.REFACTORING_EVENT_TOPIC) .refactoringDone(getRefactoringId(), afterData); } }), RefactoringBundle.message("inline.command", localName), null); } private static boolean isSameDefinition(PyStatement def, PsiElement otherDef) { if (otherDef instanceof PyTargetExpression) otherDef = otherDef.getParent(); return otherDef == def; } private static ScopeOwner getContext(PyTargetExpression local) { ScopeOwner context = PsiTreeUtil.getParentOfType(local, PyFunction.class); if (context == null) { context = PsiTreeUtil.getParentOfType(local, PyClass.class); } if (context == null) { context = (PyFile)local.getContainingFile(); } return context; } private static Pair<PyStatement, Boolean> getAssignmentToInline(ScopeOwner containerBlock, PyReferenceExpression expr, PyTargetExpression local, Project project) { if (expr != null) { try { final List<Instruction> candidates = PyDefUseUtil.getLatestDefs(containerBlock, local.getName(), expr, true, true); if (candidates.size() == 1) { final PyStatement expression = getAssignmentByLeftPart((PyElement)candidates.get(0).getElement()); return Pair.create(expression, false); } return Pair.create(null, candidates.size() > 0); } catch (PyDefUseUtil.InstructionNotFoundException ignored) { } } final Query<PsiReference> query = ReferencesSearch.search(local, 
GlobalSearchScope.allScope(project), false); final PsiReference first = query.findFirst(); final PyElement lValue = first != null ? (PyElement)first.resolve() : null; return lValue != null ? Pair.create(getAssignmentByLeftPart(lValue), false) : EMPTY_DEF_RESULT; } @Nullable private static PyStatement getAssignmentByLeftPart(PyElement candidate) { final PsiElement parent = candidate.getParent(); return parent instanceof PyAssignmentStatement || parent instanceof PyAugAssignmentStatement ? (PyStatement)parent : null; } @Nullable private static PyExpression getValue(@Nullable PyStatement def) { if (def == null) return null; if (def instanceof PyAssignmentStatement) { return ((PyAssignmentStatement)def).getAssignedValue(); } return ((PyAugAssignmentStatement)def).getValue(); } @Nullable private static PyExpression getObject(@Nullable PyStatement def) { if (def == null) return null; if (def instanceof PyAssignmentStatement) { return ((PyAssignmentStatement)def).getTargets()[0]; } return ((PyAugAssignmentStatement)def).getTarget(); } @NotNull private static PyExpression prepareValue(@NotNull PyStatement def, @NotNull String localName, @NotNull Project project) { final PyExpression value = getValue(def); assert value != null; if (def instanceof PyAugAssignmentStatement) { final PyAugAssignmentStatement expression = (PyAugAssignmentStatement)def; final PsiElement operation = expression.getOperation(); assert operation != null; final String op = operation.getText().replace('=', ' '); final LanguageLevel level = LanguageLevel.forElement(value); return PyElementGenerator.getInstance(project).createExpressionFromText(level, localName + " " + op + value.getText() + ")"); } return value; } public static String getRefactoringId() { return "refactoring.python.inline.local"; } private static @Nls(capitalization = Nls.Capitalization.Title) String getRefactoringName() { return RefactoringBundle.message("inline.variable.title"); } }
apache-2.0
quarian/dataverse
src/main/java/edu/harvard/iq/dataverse/authorization/AuthenticationServiceBean.java
21503
package edu.harvard.iq.dataverse.authorization; import edu.harvard.iq.dataverse.search.IndexServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean; import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationFailedException; import edu.harvard.iq.dataverse.authorization.exceptions.AuthenticationProviderFactoryNotFoundException; import edu.harvard.iq.dataverse.authorization.exceptions.AuthorizationSetupException; import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.AuthenticationProviderRow; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinAuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; import edu.harvard.iq.dataverse.authorization.providers.echo.EchoAuthenticationProviderFactory; import edu.harvard.iq.dataverse.authorization.providers.shib.ShibAuthenticationProvider; import edu.harvard.iq.dataverse.authorization.users.ApiToken; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import java.sql.Timestamp; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.PostConstruct; import javax.ejb.EJB; import javax.ejb.Singleton; import javax.persistence.EntityManager; import javax.persistence.NoResultException; import javax.persistence.NonUniqueResultException; import javax.persistence.PersistenceContext; import javax.persistence.TypedQuery; /** * The AuthenticationManager is responsible for registering and listing * AuthenticationProviders. 
There's a single instance per application. * * Register the providers in the {@link #startup()} method. */ @Singleton public class AuthenticationServiceBean { private static final Logger logger = Logger.getLogger(AuthenticationServiceBean.class.getName()); /** * Where all registered authentication providers live. */ final Map<String, AuthenticationProvider> authenticationProviders = new HashMap<>(); final Map<String, AuthenticationProviderFactory> providerFactories = new HashMap<>(); @EJB BuiltinUserServiceBean builtinUserServiceBean; @EJB IndexServiceBean indexService; @EJB protected ActionLogServiceBean actionLogSvc; @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; @PostConstruct public void startup() { // First, set up the factories try { registerProviderFactory( new BuiltinAuthenticationProviderFactory(builtinUserServiceBean) ); registerProviderFactory( new EchoAuthenticationProviderFactory() ); /** * Register shib provider factory here. Test enable/disable via Admin API, etc. */ new ShibAuthenticationProvider(); } catch (AuthorizationSetupException ex) { logger.log(Level.SEVERE, "Exception setting up the authentication provider factories: " + ex.getMessage(), ex); } // Now, load the providers. 
for ( AuthenticationProviderRow row : em.createNamedQuery("AuthenticationProviderRow.findAllEnabled", AuthenticationProviderRow.class) .getResultList() ) { try { registerProvider( loadProvider(row) ); } catch ( AuthenticationProviderFactoryNotFoundException e ) { logger.log(Level.SEVERE, "Cannot find authentication provider factory with alias '" + e.getFactoryAlias() + "'",e); } catch (AuthorizationSetupException ex) { logger.log(Level.SEVERE, "Exception setting up the authentication provider '" + row.getId() + "': " + ex.getMessage(), ex); } } } public void registerProviderFactory(AuthenticationProviderFactory aFactory) throws AuthorizationSetupException { if ( providerFactories.containsKey(aFactory.getAlias()) ) { throw new AuthorizationSetupException( "Duplicate alias " + aFactory.getAlias() + " for authentication provider factory."); } providerFactories.put( aFactory.getAlias(), aFactory); logger.log( Level.FINE, "Registered Authentication Provider Factory {0} as {1}", new Object[]{aFactory.getInfo(), aFactory.getAlias()}); } /** * Tries to load and {@link AuthenticationProvider} using the passed {@link AuthenticationProviderRow}. * @param aRow The row to load the provider from. * @return The provider, if successful * @throws AuthenticationProviderFactoryNotFoundException If the row specifies a non-existent factory * @throws AuthorizationSetupException If the factory failed to instantiate a provider from the row. 
*/ public AuthenticationProvider loadProvider( AuthenticationProviderRow aRow ) throws AuthenticationProviderFactoryNotFoundException, AuthorizationSetupException { AuthenticationProviderFactory fact = getProviderFactory(aRow.getFactoryAlias()); if ( fact == null ) throw new AuthenticationProviderFactoryNotFoundException(aRow.getFactoryAlias()); return fact.buildProvider(aRow); } public void registerProvider(AuthenticationProvider aProvider) throws AuthorizationSetupException { if ( authenticationProviders.containsKey(aProvider.getId()) ) { throw new AuthorizationSetupException( "Duplicate id " + aProvider.getId() + " for authentication provider."); } authenticationProviders.put( aProvider.getId(), aProvider); actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "registerProvider") .setInfo(aProvider.getId() + ":" + aProvider.getInfo().getTitle())); } public void deregisterProvider( String id ) { authenticationProviders.remove( id ); actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "deregisterProvider") .setInfo(id)); logger.log(Level.INFO,"Deregistered provider {0}", new Object[]{id}); logger.log(Level.INFO,"Providers left {0}", new Object[]{getAuthenticationProviderIds()}); } public Set<String> getAuthenticationProviderIds() { return authenticationProviders.keySet(); } public <T extends AuthenticationProvider> Set<String> getAuthenticationProviderIdsOfType( Class<T> aClass ) { Set<String> retVal = new TreeSet<>(); for ( Map.Entry<String, AuthenticationProvider> p : authenticationProviders.entrySet() ) { if ( aClass.isAssignableFrom( p.getValue().getClass() ) ) { retVal.add( p.getKey() ); } } return retVal; } public AuthenticationProviderFactory getProviderFactory( String alias ) { return providerFactories.get(alias); } public AuthenticationProvider getAuthenticationProvider( String id ) { return authenticationProviders.get( id ); } public AuthenticatedUser findByID(Object pk){ if (pk==null){ return null; } return 
em.find(AuthenticatedUser.class, pk); } public void removeApiToken(AuthenticatedUser user){ if (user!=null) { ApiToken apiToken = findApiTokenByUser(user); if (apiToken != null) { em.remove(apiToken); } } } /** * Use with care! This method was written primarily for developers * interested in API testing who want to: * * 1. Create a temporary user and get an API token. * * 2. Do some work with that API token. * * 3. Delete all the stuff that was created with the API token. * * 4. Delete the temporary user. * * Before calling this method, make sure you've deleted all the stuff tied * to the user, including stuff they've created, role assignments, group * assignments, etc. * * Longer term, the intention is to have a "disableAuthenticatedUser" * method/command. */ public void deleteAuthenticatedUser(Object pk) { AuthenticatedUser user = em.find(AuthenticatedUser.class, pk); if (user!=null) { ApiToken apiToken = findApiTokenByUser(user); if (apiToken != null) { em.remove(apiToken); } if (user.isBuiltInUser()) { BuiltinUser builtin = builtinUserServiceBean.findByUserName(user.getUserIdentifier()); em.remove(builtin); } actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "deleteUser") .setInfo(user.getUserIdentifier())); em.remove(user.getAuthenticatedUserLookup()); em.remove(user); } } public AuthenticatedUser getAuthenticatedUser( String identifier ) { try { return em.createNamedQuery("AuthenticatedUser.findByIdentifier", AuthenticatedUser.class) .setParameter("identifier", identifier) .getSingleResult(); } catch ( NoResultException nre ) { return null; } } public AuthenticatedUser getAuthenticatedUserByEmail( String email ) { try { return em.createNamedQuery("AuthenticatedUser.findByEmail", AuthenticatedUser.class) .setParameter("email", email) .getSingleResult(); } catch ( NoResultException ex ) { logger.log(Level.INFO, "no user found using {0}", email); return null; } catch ( NonUniqueResultException ex ) { logger.log(Level.INFO, "multiple users 
found using {0}: {1}", new Object[]{email, ex}); return null; } } public AuthenticatedUser authenticate( String authenticationProviderId, AuthenticationRequest req ) throws AuthenticationFailedException { AuthenticationProvider prv = getAuthenticationProvider(authenticationProviderId); if ( prv == null ) throw new IllegalArgumentException("No authentication provider listed under id " + authenticationProviderId ); AuthenticationResponse resp = prv.authenticate(req); if ( resp.getStatus() == AuthenticationResponse.Status.SUCCESS ) { // yay! see if we already have this user. AuthenticatedUser user = lookupUser(authenticationProviderId, resp.getUserId()); return ( user == null ) ? AuthenticationServiceBean.this.createAuthenticatedUser( new UserRecordIdentifier(authenticationProviderId, resp.getUserId()), resp.getUserId(), resp.getUserDisplayInfo(), true ) : updateAuthenticatedUser( user, resp.getUserDisplayInfo() ); } else { throw new AuthenticationFailedException(resp, "Authentication Failed: " + resp.getMessage()); } } public AuthenticatedUser lookupUser(String authPrvId, String userPersistentId) { TypedQuery<AuthenticatedUserLookup> typedQuery = em.createNamedQuery("AuthenticatedUserLookup.findByAuthPrvID_PersUserId", AuthenticatedUserLookup.class); typedQuery.setParameter("authPrvId", authPrvId); typedQuery.setParameter("persUserId", userPersistentId); try { AuthenticatedUserLookup au = typedQuery.getSingleResult(); return au.getAuthenticatedUser(); } catch (NoResultException | NonUniqueResultException ex) { return null; } } public ApiToken findApiToken(String token) { try { return em.createNamedQuery("ApiToken.findByTokenString", ApiToken.class) .setParameter("tokenString", token) .getSingleResult(); } catch (NoResultException ex) { return null; } } public ApiToken findApiTokenByUser(AuthenticatedUser au) { if (au == null) { return null; } TypedQuery<ApiToken> typedQuery = em.createNamedQuery("ApiToken.findByUser", ApiToken.class); typedQuery.setParameter("user", 
au); try { return typedQuery.getSingleResult(); } catch (NoResultException | NonUniqueResultException ex) { logger.log(Level.INFO, "When looking up API token for {0} caught {1}", new Object[]{au, ex}); return null; } } // A method for generating a new API token; // TODO: this is a simple, one-size-fits-all solution; we'll need // to expand this system, to be able to generate tokens with different // lifecycles/valid for specific actions only, etc. // -- L.A. 4.0 beta12 public ApiToken generateApiTokenForUser(AuthenticatedUser au) { if (au == null) { return null; } ApiToken apiToken = new ApiToken(); apiToken.setTokenString(java.util.UUID.randomUUID().toString()); apiToken.setAuthenticatedUser(au); Calendar c = Calendar.getInstance(); apiToken.setCreateTime(new Timestamp(c.getTimeInMillis())); c.roll(Calendar.YEAR, 1); apiToken.setExpireTime(new Timestamp(c.getTimeInMillis())); save(apiToken); actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "generateApiToken") .setInfo("user:" + au.getIdentifier() + " token:" + apiToken.getTokenString())); return apiToken; } public AuthenticatedUser lookupUser( String apiToken ) { ApiToken tkn = findApiToken(apiToken); if ( tkn == null ) return null; if ( tkn.isDisabled() ) return null; if ( tkn.getExpireTime() != null ) { if ( tkn.getExpireTime().before( new Timestamp(new Date().getTime())) ) { em.remove(tkn); return null; } } return tkn.getAuthenticatedUser(); } public AuthenticatedUser save( AuthenticatedUser user ) { user.setModificationTime(getCurrentTimestamp()); em.persist(user); em.flush(); return user; } public AuthenticatedUser update( AuthenticatedUser user ) { user.setModificationTime(getCurrentTimestamp()); return em.merge(user); } public ApiToken save( ApiToken aToken ) { if ( aToken.getId() == null ) { em.persist(aToken); return aToken; } else { return em.merge( aToken ); } } /** * Creates an authenticated user based on the passed * {@code userDisplayInfo}, a lookup entry for them based * 
UserIdentifier.getLookupStringPerAuthProvider (within the supplied * authentication provider), and internal user identifier (used for role * assignments, etc.) based on UserIdentifier.getInternalUserIdentifer. * * @param userRecordId * @param proposedAuthenticatedUserIdentifier * @param userDisplayInfo * @param generateUniqueIdentifier if {@code true}, create a new, unique user identifier for the created user, if the suggested one exists. * @return the newly created user, or {@code null} if the proposed identifier exists and {@code generateUniqueIdentifier} was {@code false}. */ public AuthenticatedUser createAuthenticatedUser(UserRecordIdentifier userRecordId, String proposedAuthenticatedUserIdentifier, AuthenticatedUserDisplayInfo userDisplayInfo, boolean generateUniqueIdentifier) { AuthenticatedUser authenticatedUser = new AuthenticatedUser(); authenticatedUser.applyDisplayInfo(userDisplayInfo); // we now select a username for the generated AuthenticatedUser, or give up String internalUserIdentifer = proposedAuthenticatedUserIdentifier; // TODO should lock table authenticated users for write here if ( identifierExists(internalUserIdentifer) ) { if ( ! 
generateUniqueIdentifier ) { return null; } int i=1; String identifier = internalUserIdentifer + i; while ( identifierExists(identifier) ) { i += 1; } authenticatedUser.setUserIdentifier(identifier); } else { authenticatedUser.setUserIdentifier(internalUserIdentifer); } authenticatedUser = save( authenticatedUser ); // TODO should unlock table authenticated users for write here AuthenticatedUserLookup auusLookup = userRecordId.createAuthenticatedUserLookup(authenticatedUser); em.persist( auusLookup ); authenticatedUser.setAuthenticatedUserLookup(auusLookup); actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "createUser") .setInfo(authenticatedUser.getIdentifier())); return authenticatedUser; } public boolean identifierExists( String idtf ) { return em.createNamedQuery("AuthenticatedUser.countOfIdentifier", Number.class) .setParameter("identifier", idtf) .getSingleResult().intValue() > 0; } public AuthenticatedUser updateAuthenticatedUser(AuthenticatedUser user, AuthenticatedUserDisplayInfo userDisplayInfo) { user.applyDisplayInfo(userDisplayInfo); actionLogSvc.log( new ActionLogRecord(ActionLogRecord.ActionType.Auth, "updateUser") .setInfo(user.getIdentifier())); return update(user); } public List<AuthenticatedUser> findAllAuthenticatedUsers() { return em.createNamedQuery("AuthenticatedUser.findAll", AuthenticatedUser.class).getResultList(); } public List<AuthenticatedUser> findSuperUsers() { return em.createNamedQuery("AuthenticatedUser.findSuperUsers", AuthenticatedUser.class).getResultList(); } public Set<AuthenticationProviderFactory> listProviderFactories() { return new HashSet<>( providerFactories.values() ); } public Timestamp getCurrentTimestamp() { return new Timestamp(new Date().getTime()); } // TODO should probably be moved to the Shib provider - this is a classic Shib-specific // use case. This class should deal with general autnetications. 
public AuthenticatedUser convertBuiltInToShib(AuthenticatedUser builtInUserToConvert, String shibProviderId, UserIdentifier newUserIdentifierInLookupTable) { logger.info("converting user " + builtInUserToConvert.getId() + " from builtin to shib"); String builtInUserIdentifier = builtInUserToConvert.getIdentifier(); logger.info("builtin user identifier: " + builtInUserIdentifier); TypedQuery<AuthenticatedUserLookup> typedQuery = em.createQuery("SELECT OBJECT(o) FROM AuthenticatedUserLookup AS o WHERE o.authenticatedUser = :auid", AuthenticatedUserLookup.class); typedQuery.setParameter("auid", builtInUserToConvert); AuthenticatedUserLookup authuserLookup; try { authuserLookup = typedQuery.getSingleResult(); } catch (NoResultException | NonUniqueResultException ex) { logger.info("exception caught: " + ex); return null; } if (authuserLookup == null) { return null; } String oldProviderId = authuserLookup.getAuthenticationProviderId(); logger.info("we expect this to be 'builtin': " + oldProviderId); authuserLookup.setAuthenticationProviderId(shibProviderId); String oldUserLookupIdentifier = authuserLookup.getPersistentUserId(); logger.info("this should be 'pete' or whatever the old builtin username was: " + oldUserLookupIdentifier); String perUserShibIdentifier = newUserIdentifierInLookupTable.getLookupStringPerAuthProvider(); authuserLookup.setPersistentUserId(perUserShibIdentifier); /** * @todo this should be a transaction of some kind. We want to update * the authenticateduserlookup and also delete the row from the * builtinuser table in a single transaction. 
*/ em.persist(authuserLookup); String builtinUsername = builtInUserIdentifier.replaceFirst(AuthenticatedUser.IDENTIFIER_PREFIX, ""); BuiltinUser builtin = builtinUserServiceBean.findByUserName(builtinUsername); if (builtin != null) { em.remove(builtin); } else { logger.info("Couldn't delete builtin user because could find it based on username " + builtinUsername); } AuthenticatedUser shibUser = lookupUser(shibProviderId, perUserShibIdentifier); if (shibUser != null) { return shibUser; } return null; } }
apache-2.0
hkh412/OneTwoThree_Deprecated
src/com/hkh/ott123/MainActivity.java
16179
package com.hkh.ott123;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import android.app.ActionBar;
import android.app.Activity;
import android.app.SearchManager;
import android.content.Context;
import android.content.Intent;
import android.content.res.AssetManager;
import android.content.res.Configuration;
import android.os.Bundle;
import android.provider.Settings.Secure;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.app.Fragment;
import android.support.v4.view.GravityCompat;
import android.support.v4.view.ViewPager;
import android.support.v4.widget.DrawerLayout;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ExpandableListView;
import android.widget.ExpandableListView.OnChildClickListener;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.SearchView;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.ads.AdRequest;
import com.google.android.gms.ads.AdView;
import com.google.android.gms.analytics.HitBuilders;
import com.google.android.gms.analytics.Tracker;
import com.google.gson.Gson;
import com.google.gson.stream.JsonReader;
import com.hkh.ott123.AnalyticsApplication.TrackerName;
import com.hkh.ott123.adapter.ActionBarDropDownAdapter;
import com.hkh.ott123.adapter.ExpandableDrawerListAdapter;
import com.hkh.ott123.adapter.ListDetailPagerAdapter;
import com.hkh.ott123.config.Config;
import com.hkh.ott123.data.CityData;
import com.hkh.ott123.data.Session;
import com.hkh.ott123.data.UrlData;
import com.hkh.ott123.fragments.BoardFragment;
import com.hkh.ott123.fragments.LoginDialogFragment;
import com.hkh.ott123.fragments.LoginDialogFragment.LoginListener;
import com.hkh.ott123.manager.AppDataManager;
import com.hkh.ott123.manager.LoginManager;
import com.hkh.ott123.manager.PostStateManager;
import com.hkh.ott123.manager.SessionManager;
import com.hkh.ott123.manager.SharedPreferenceManager;
import com.hkh.ott123.util.Util;
import com.purplebrain.adbuddiz.sdk.AdBuddiz;

/**
 * Main screen of the app: hosts the navigation drawer (expandable menu of
 * boards), a city selector in the action bar, a ViewPager of board fragments,
 * login/logout handling, and ad/analytics setup.
 */
public class MainActivity extends PagerActivity implements ActionBar.OnNavigationListener, View.OnClickListener, LoginListener, OnChildClickListener {

    private static String TAG = MainActivity.class.getSimpleName();

    Context mContext;
    ActionBarDrawerToggle mDrawerToggle;
    DrawerLayout mDrawerLayout;

    // Left slide (drawer) menu views
    LinearLayout mLeftDrawer;
    FrameLayout mLayoutHead;
    FrameLayout mLayoutProfile;
    TextView tvLogin;
    ExpandableListView mDrawerList;
    TextView tvNickname;
    TextView tvLevel;
    TextView tvPoint;

    ActionBarDropDownAdapter mActionBarAdapter;
    ExpandableDrawerListAdapter mDrawerListAdapter;
    ActionBar actionBar;

    // Per-city domain information (loaded from assets/city.json)
    ArrayList<CityData> cityList = new ArrayList<CityData>();

    // Full menu data (parent and child entries combined)
    ArrayList<UrlData> urlList = new ArrayList<UrlData>();

    // Top-level (parent) menu entries in the left drawer
    ArrayList<UrlData> parentNodes = new ArrayList<UrlData>();

    // Child board lists per parent menu (e.g. job postings, meet friends, ...)
    ArrayList<ArrayList<UrlData>> childNodes = new ArrayList<ArrayList<UrlData>>();

    // True once the left drawer has been auto-opened on first load
    boolean initialMenuOpen = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mContext = this;
        setContentView(R.layout.activity_main);
        mLoadingLayout = (LinearLayout) findViewById(R.id.layout_indicator);

        // Load city/region data
        cityList = loadCityData();

        // Load menu data and restore saved favorites into the first group
        urlList = Util.loadUrlMapData(mContext, parentNodes, childNodes);
        restoreFavoriteMenu(urlList);

        mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
        mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        mDrawerToggle = new ActionBarDrawerToggle(this, mDrawerLayout, R.drawable.ic_drawer, R.string.drawer_open, R.string.drawer_close) {
            @Override
            public void onDrawerClosed(View drawerView) {
                super.onDrawerClosed(drawerView);
                invalidateOptionsMenu();
            }

            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                // Dismiss the keyboard so it does not overlap the drawer
                hideSoftKeyboard();
                invalidateOptionsMenu();
            }
        };
        mDrawerToggle.setDrawerIndicatorEnabled(true);
        mDrawerLayout.setDrawerListener(mDrawerToggle);

        mLeftDrawer = (LinearLayout) findViewById(R.id.left_drawer);
        mLayoutHead = (FrameLayout) findViewById(R.id.layout_head);
        mLayoutProfile = (FrameLayout) findViewById(R.id.layout_profile);
        tvLogin = (TextView) findViewById(R.id.tv_login_text);
        tvNickname = (TextView) findViewById(R.id.tv_profile_nickname);
        tvLevel = (TextView) findViewById(R.id.tv_profile_level);
        tvPoint = (TextView) findViewById(R.id.tv_profile_point);
        mLayoutHead.setOnClickListener(this);
        mViewPager = (ViewPager) findViewById(R.id.pager);
        mViewPager.setOnPageChangeListener(this);
        mDrawerList = (ExpandableListView) findViewById(R.id.drawer_list);
        mDrawerListAdapter = new ExpandableDrawerListAdapter(mContext, parentNodes, childNodes);
        mDrawerList.setAdapter(mDrawerListAdapter);

        // Listen for clicks on child nodes of the left drawer menu
        mDrawerList.setOnChildClickListener(this);

        // The favorites menu (group 0) is expanded by default
        mDrawerList.expandGroup(0);

        int lastUid = SharedPreferenceManager.getInstance(mContext).getInt("last-menu-uid");
        UrlData lastUrlData = null;
        if (lastUid < 0) {
            // No saved menu; fall back to the default board (uid 11)
            lastUrlData = Util.getMatchedUrlDataByUid(urlList, 11);
        } else {
            lastUrlData = Util.getMatchedUrlDataByUid(urlList, lastUid);
        }
        AppDataManager.getInstance().setCurrentUrlData(lastUrlData);

        setActionBar();
        setProfile();
        setAdView();
        sendAnalytics();
        checkAutoLogin();
    }

    @Override
    protected void onPostCreate(Bundle savedInstanceState) {
        super.onPostCreate(savedInstanceState);
        // Sync the drawer indicator state after restoration
        mDrawerToggle.syncState();
    }

    /**
     * Configures the action bar: home/up button, list (drop-down) navigation
     * with the city adapter, and restores the previously selected city.
     */
    private void setActionBar() {
        actionBar = getActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        actionBar.setHomeButtonEnabled(true);
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
        mActionBarAdapter = new ActionBarDropDownAdapter(mContext, android.R.layout.simple_spinner_dropdown_item, cityList);
        actionBar.setListNavigationCallbacks(mActionBarAdapter, this);
        String city = SharedPreferenceManager.getInstance(mContext).getString("city");
        if (city != null) {
            int position = Util.findFirstMatch(cityList, city);
            if (position >= 0) {
                actionBar.setSelectedNavigationItem(position);
            }
        }
    }

    /**
     * Sets up the AdMob banner and, every {@code Config.AD_THRESHOLD} views,
     * shows an AdBuddiz interstitial. No-op when ads are disabled in Config.
     */
    @Override
    public void setAdView() {
        if (!Config.AdEnable) {
            return;
        }
        mAdView = (AdView) findViewById(R.id.adView);
        mAdView.setAdListener(new ToastAdListener(this));
        String deviceId = Secure.getString(getContentResolver(), Secure.ANDROID_ID);
        Log.d(TAG, "DeviceId: "+deviceId);
        AdRequest.Builder builder = new AdRequest.Builder();
        if (Config.ADVIEW_TEST) {
            // Register emulator and this device as ad test devices
            builder.addTestDevice(AdRequest.DEVICE_ID_EMULATOR)
                    .addTestDevice(deviceId);
        }
        adRequest = builder.build();
        mAdView.loadAd(adRequest);
        // Secondary AD provider - adbuddiz.com
        SharedPreferenceManager spm = SharedPreferenceManager.getInstance(mContext);
        int viewCnt = spm.getInt("view_count");
        if (viewCnt >= Config.AD_THRESHOLD) {
            // Threshold reached: reset the counter and show an interstitial
            spm.putInt("view_count", 0);
            AdBuddiz.setPublisherKey(mContext.getString(R.string.adbuddiz_pub_key));
            AdBuddiz.cacheAds((Activity)mContext);
            AdBuddiz.showAd(this);
        } else {
            viewCnt++;
            spm.putInt("view_count", viewCnt);
        }
    }

    /** Reports this screen to Google Analytics. */
    public void sendAnalytics() {
        Tracker t = ((AnalyticsApplication) getApplication()).getTracker(
                TrackerName.APP_TRACKER);
        t.setScreenName(TAG);
        t.send(new HitBuilders.AppViewBuilder().build());
    }

    /**
     * Loads city data by streaming assets/city.json with Gson.
     *
     * @return the parsed cities; empty on I/O failure (error only printed)
     */
    private ArrayList<CityData> loadCityData() {
        AssetManager assetManager = mContext.getAssets();
        ArrayList<CityData> cityList = new ArrayList<CityData>();
        Gson gson = new Gson();
        try {
            InputStream in = assetManager.open("city.json");
            JsonReader reader = new JsonReader(new InputStreamReader(in, "UTF-8"));
            reader.beginArray();
            while (reader.hasNext()) {
                CityData cityData = gson.fromJson(reader, CityData.class);
                cityList.add(cityData);
            }
            reader.endArray();
            reader.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return cityList;
    }

    /**
     * Rebuilds the pager with a fresh BoardFragment for the given menu entry
     * and puts its name in the action bar title.
     */
    private void loadFragment(UrlData urlData) {
        actionBar.setTitle(urlData.getName());
        Fragment fragment = new BoardFragment();
        mListDetailAdapter = null;
        mListDetailAdapter = new ListDetailPagerAdapter(getSupportFragmentManager(), fragment);
        mViewPager.setAdapter(mListDetailAdapter);
        mViewPager.setOffscreenPageLimit(mListDetailAdapter.getCount());
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        // Search is wired up but hidden for now
        MenuItem searchItem = menu.findItem(R.id.action_search);
        searchItem.setVisible(false);
        SearchView searchView = (SearchView) searchItem.getActionView();
        SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE);
        searchView.setSearchableInfo(searchManager.getSearchableInfo(getComponentName()));
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        int id = item.getItemId();
        if (id==R.id.action_write) {
            // "Write post" action: requires a logged-in session
            Session session = SessionManager.getInstance().getSession();
            if (session == null) {
                Toast.makeText(mContext, mContext.getString(R.string.message_write_login), Toast.LENGTH_LONG).show();
                return true;
            }
            Intent intent = new Intent(mContext, WriteActivity.class);
            startActivityForResult(intent, 0);
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Returning from WriteActivity (request 0): reload the current board
        if (requestCode == 0) {
            if (resultCode == RESULT_OK) {
                UrlData urlData = AppDataManager.getInstance().getCurrentUrlData();
                loadFragment(urlData);
            }
        }
    }

    @Override
    public boolean onChildClick(ExpandableListView parent, View v, int groupPosition, int childPosition, long id) {
        mDrawerLayout.closeDrawer(mLeftDrawer);
        mBackBtnClickCount = 0;
        mViewPager.setAdapter(null);
        // Remember the selected menu entry
        UrlData urlData = mDrawerListAdapter.getChild(groupPosition, childPosition);
        AppDataManager.getInstance().setCurrentUrlData(urlData);
        int parentIndex = groupPosition;
        if (parentIndex == 0) {
            // Clicked inside the favorites group: resolve the entry's real
            // parent group by searching the other child lists
            for (int i=0; i<childNodes.size(); i++) {
                ArrayList<UrlData> list = childNodes.get(i);
                if (list.indexOf(urlData) >= 0) {
                    parentIndex = i;
                    break;
                }
            }
        }
        SharedPreferenceManager.getInstance(mContext).putInt("last-menu-uid", urlData.getUid());
        loadFragment(urlData);
        return true;
    }

    @Override
    public boolean onNavigationItemSelected(int position, long itemId) {
        // City chosen from the action-bar drop-down: persist it and reload
        hideSoftKeyboard();
        CityData cityData = mActionBarAdapter.getItem(position);
        AppDataManager.getInstance().setCurrentCityData(cityData);
        SharedPreferenceManager.getInstance(mContext).putString("city", cityData.getCity());
        mDrawerLayout.closeDrawer(mLeftDrawer);
        UrlData urlData = AppDataManager.getInstance().getCurrentUrlData();
        loadFragment(urlData);
        return true;
    }

    /**
     * Updates the profile area in the left drawer: shows nickname/level/point
     * when a session exists, otherwise shows the login prompt.
     */
    private void setProfile() {
        Session session = SessionManager.getInstance().getSession();
        if (session != null) {
            tvLogin.setVisibility(View.GONE);
            mLayoutProfile.setVisibility(View.VISIBLE);
            tvNickname.setText(session.getNickName());
            tvLevel.setText(session.getLevel());
            tvPoint.setText(session.getPoint());
        } else {
            tvLogin.setVisibility(View.VISIBLE);
            mLayoutProfile.setVisibility(View.GONE);
        }
    }

    /**
     * Checks whether auto-login is enabled and, if so, performs the login
     * with the stored credentials.
     */
    private void checkAutoLogin() {
        SharedPreferenceManager spm = SharedPreferenceManager.getInstance(mContext);
        boolean isAutoLogin = spm.getBoolean("auto-login");
        if (isAutoLogin) {
            String username = spm.getString("username");
            String password = spm.getString("password");
            if (username == null || password == null) {
                // Auto-login was enabled but the stored account is incomplete
                Log.e(TAG, "자동로그인이 체크되었지만 저장된 계정에 문제가 발생함.");
            } else {
                LoginManager.getInstance().doLogin(mContext, username, password, this);
            }
        }
    }

    @Override
    public void onClick(View v) {
        if (mLayoutHead == v) {
            mDrawerLayout.closeDrawer(mLeftDrawer);
            // Open the login dialog
            LoginDialogFragment dialog = new LoginDialogFragment();
            dialog.setLoginListener(this);
            dialog.show(getSupportFragmentManager(), "NoticeDialogFragment");
        }
    }

    @Override
    public void onLoginComplete() {
        // Login succeeded: refresh profile and write permission
        setProfile();
        refreshWritePermission();
        SharedPreferenceManager spm = SharedPreferenceManager.getInstance(mContext);
        // Enable auto-login for next launch
        spm.putBoolean("auto-login", true);
        Toast.makeText(mContext, mContext.getString(R.string.message_login_success), Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onLoginFailure(String message) {
        // Show the server-provided message when available, else a generic one
        if (message != null && message.length() > 0) {
            Toast.makeText(mContext, message, Toast.LENGTH_SHORT).show();
        } else {
            Toast.makeText(mContext, mContext.getString(R.string.message_login_problem), Toast.LENGTH_SHORT).show();
        }
    }

    @Override
    public void onLogoutComplete() {
        // Clear the session
        SessionManager.getInstance().setSession(null);
        // Disable auto-login
        SharedPreferenceManager spm = SharedPreferenceManager.getInstance(mContext);
        spm.putBoolean("auto-login", false);
        // Refresh the drawer profile area
        setProfile();
        // Notify the user
        Toast.makeText(mContext, mContext.getString(R.string.message_logout_success), Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onLogoutFailure() {
        Toast.makeText(mContext, mContext.getString(R.string.message_logout_problem), Toast.LENGTH_SHORT).show();
    }

    /**
     * Restores saved favorite menu entries into the favorites group
     * (childNodes group 0).
     *
     * @param urlList all menu entries; those whose uid was saved as a
     *                favorite are appended to the favorites list
     */
    private void restoreFavoriteMenu(List<UrlData> urlList) {
        // restore favorite menu
        PostStateManager psm = PostStateManager.getInstance(mContext);
        List<Integer> favoriteList = psm.restoreFavoriteMenu();
        // If a saved favorite list exists, add matching entries to group 0
        if (favoriteList != null) {
            ArrayList<UrlData> favorites = childNodes.get(0);
            for (UrlData menuData : urlList) {
                if (favoriteList.contains(menuData.getUid())) {
                    favorites.add(menuData);
                }
            }
        }
    }

    /**
     * Opens the side menu once, shortly (300 ms) after the first board list
     * has finished loading.
     */
    public void openDrawerList() {
        if (!initialMenuOpen) {
            TimerTask timerTask = new TimerTask() {
                @Override
                public void run() {
                    // Drawer must be touched on the UI thread
                    runOnUiThread(new Runnable(){
                        @Override
                        public void run() {
                            mDrawerLayout.openDrawer(mLeftDrawer);
                        }
                    });
                }
            };
            Timer timer = new Timer();
            timer.schedule(timerTask, 300);
            initialMenuOpen = true;
        }
    }

    @Override
    public void onShowBackButtonToast() {
        Toast.makeText(mContext, mContext.getString(R.string.message_back_button_toast), Toast.LENGTH_SHORT).show();
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // NOTE(review): the two log messages look swapped — KEYBOARDHIDDEN_NO
        // means the keyboard is NOT hidden. Debug-only output; confirm intent.
        if (newConfig.keyboardHidden == Configuration.KEYBOARDHIDDEN_NO) {
            Log.d(TAG, "keyboard hidden");
        } else {
            Log.d(TAG, "keyboard showed up");
        }
    }
}
apache-2.0
furaoing/HanLP
src/main/java/com/hankcs/hanlp/ie/ner/util/Config.java
859
package com.hankcs.hanlp.ie.ner.util;

import java.util.HashSet;
import java.util.Set;

/**
 * Created by roy on 2016/4/15.
 *
 * Holds the part-of-speech tag sets used to recognize named entities:
 * one set of tags marking organization names and one marking person names.
 */
public class Config {
    /** POS tags treated as organization-name tags. */
    public Set<String> orgTagging;
    /** POS tags treated as person-name tags. */
    public Set<String> personTagging;

    public Config() {
        orgTagging = loadOrganizationTagging();
        personTagging = loadPersonNameTagging();
    }

    /** Builds the organization tag set. */
    private Set<String> loadOrganizationTagging() {
        String[] tags = {"nz", "ntc", "nt", "nio", "ni", "ntcb", "ntcf", "ntch"};
        Set<String> nature = new HashSet<String>();
        for (String tag : tags) {
            nature.add(tag);
        }
        return nature;
    }

    /** Builds the person-name tag set (currently just "nr"). */
    private Set<String> loadPersonNameTagging() {
        Set<String> nature = new HashSet<String>();
        nature.add("nr");
        return nature;
    }
}
apache-2.0
joshuairl/toothchat-client
src/jtattoo/src/com/jtattoo/plaf/BaseRadioButtonMenuItemUI.java
545
/*
 * Copyright 2005 MH-Software-Entwicklung. All rights reserved.
 * Use is subject to license terms.
 */
package com.jtattoo.plaf;

import javax.swing.*;
import javax.swing.plaf.*;

/**
 * UI delegate for radio-button menu items. Reuses the base menu-item
 * painting and only swaps in the radio-button check icon from the
 * UIManager defaults.
 *
 * @author Michael Hagen
 */
public class BaseRadioButtonMenuItemUI extends BaseMenuItemUI {

    /** Factory entry point used by the Swing UIManager. */
    public static ComponentUI createUI(JComponent b) {
        return new BaseRadioButtonMenuItemUI();
    }

    protected void installDefaults() {
        super.installDefaults();
        // Radio items use their own check icon from the look-and-feel defaults
        checkIcon = UIManager.getIcon("RadioButtonMenuItem.checkIcon");
    }
}
apache-2.0
Clairebi/ElasticsearchClone
src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java
136894
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.query; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; import org.apache.lucene.index.Fields; import org.apache.lucene.index.MultiFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.queries.BoostingQuery; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.Filter; import org.apache.lucene.search.FilteredQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.NumericRangeQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import 
org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.RegexpQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.search.spans.FieldMaskingSpanQuery; import org.apache.lucene.search.spans.SpanFirstQuery; import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.spans.SpanNearQuery; import org.apache.lucene.search.spans.SpanNotQuery; import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.spatial.prefix.IntersectsPrefixTreeFilter; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedString; import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.lucene.search.function.BoostScoreFunction; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.WeightFactorFunction; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; 
import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.search.NumericRangeFieldDataFilter; import org.elasticsearch.index.search.child.CustomQueryWrappingFilter; import org.elasticsearch.index.search.child.ParentConstantScoreQuery; import org.elasticsearch.index.search.geo.GeoDistanceFilter; import org.elasticsearch.index.search.geo.GeoPolygonFilter; import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxFilter; import org.elasticsearch.index.search.morelikethis.MoreLikeThisFetchService; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ElasticsearchSingleNodeTest; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Test; import java.io.IOException; import java.lang.reflect.Field; import java.util.EnumSet; import java.util.List; import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.FilterBuilders.andFilter; import static org.elasticsearch.index.query.FilterBuilders.boolFilter; import static org.elasticsearch.index.query.FilterBuilders.notFilter; import static org.elasticsearch.index.query.FilterBuilders.orFilter; import static org.elasticsearch.index.query.FilterBuilders.prefixFilter; import static org.elasticsearch.index.query.FilterBuilders.queryFilter; import static org.elasticsearch.index.query.FilterBuilders.rangeFilter; import 
static org.elasticsearch.index.query.FilterBuilders.termFilter; import static org.elasticsearch.index.query.FilterBuilders.termsFilter; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.boostingQuery; import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.disMaxQuery; import static org.elasticsearch.index.query.QueryBuilders.filteredQuery; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.moreLikeThisQuery; import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.index.query.QueryBuilders.regexpQuery; import static org.elasticsearch.index.query.QueryBuilders.spanFirstQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery; import static org.elasticsearch.index.query.QueryBuilders.spanOrQuery; import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.QueryBuilders.termsQuery; import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.factorFunction; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import 
static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; /** * */ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest { private IndexQueryParserService queryParser; @Before public void setup() throws IOException { Settings settings = ImmutableSettings.settingsBuilder() .put("index.cache.filter.type", "none") .put("name", "SimpleIndexQueryParserTests") .build(); IndexService indexService = createIndex("test", settings); MapperService mapperService = indexService.mapperService(); String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json"); mapperService.merge("person", new CompressedString(mapping), true); ParsedDocument doc = mapperService.documentMapper("person").parse(new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json"))); assertNotNull(doc.dynamicMappingsUpdate()); client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get(); queryParser = indexService.queryParserService(); } private IndexQueryParserService queryParser() throws IOException { return this.queryParser; } private BytesRef longToPrefixCoded(long val, int shift) { BytesRefBuilder bytesRef = new BytesRefBuilder(); NumericUtils.longToPrefixCoded(val, shift, bytesRef); return bytesRef.get(); } @Test public void testQueryStringBuilder() throws Exception { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(queryStringQuery("test").defaultField("content").phraseSlop(1)).query(); assertThat(parsedQuery, instanceOf(TermQuery.class)); TermQuery termQuery = (TermQuery) parsedQuery; assertThat(termQuery.getTerm(), equalTo(new Term("content", "test"))); } @Test public void testQueryString() throws Exception { 
IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(TermQuery.class)); TermQuery termQuery = (TermQuery) parsedQuery; assertThat(termQuery.getTerm(), equalTo(new Term("content", "test"))); } @Test public void testQueryStringBoostsBuilder() throws Exception { IndexQueryParserService queryParser = queryParser(); QueryStringQueryBuilder builder = queryStringQuery("field:boosted^2"); Query parsedQuery = queryParser.parse(builder).query(); assertThat(parsedQuery, instanceOf(TermQuery.class)); assertThat(((TermQuery) parsedQuery).getTerm(), equalTo(new Term("field", "boosted"))); assertThat(parsedQuery.getBoost(), equalTo(2.0f)); builder.boost(2.0f); parsedQuery = queryParser.parse(builder).query(); assertThat(parsedQuery.getBoost(), equalTo(4.0f)); builder = queryStringQuery("((field:boosted^2) AND (field:foo^1.5))^3"); parsedQuery = queryParser.parse(builder).query(); assertThat(parsedQuery, instanceOf(BooleanQuery.class)); assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 0).getTerm(), equalTo(new Term("field", "boosted"))); assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 0).getBoost(), equalTo(2.0f)); assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 1).getTerm(), equalTo(new Term("field", "foo"))); assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 1).getBoost(), equalTo(1.5f)); assertThat(parsedQuery.getBoost(), equalTo(3.0f)); builder.boost(2.0f); parsedQuery = queryParser.parse(builder).query(); assertThat(parsedQuery.getBoost(), equalTo(6.0f)); } @Test public void testQueryStringFields1Builder() throws Exception { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(queryStringQuery("test").field("content").field("name").useDisMax(false)).query(); assertThat(parsedQuery, 
instanceOf(BooleanQuery.class)); BooleanQuery bQuery = (BooleanQuery) parsedQuery; assertThat(bQuery.clauses().size(), equalTo(2)); assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 0).getTerm(), equalTo(new Term("content", "test"))); assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 1).getTerm(), equalTo(new Term("name", "test"))); } @Test public void testQueryStringFields1() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-fields1.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(BooleanQuery.class)); BooleanQuery bQuery = (BooleanQuery) parsedQuery; assertThat(bQuery.clauses().size(), equalTo(2)); assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 0).getTerm(), equalTo(new Term("content", "test"))); assertThat(assertBooleanSubQuery(parsedQuery, TermQuery.class, 1).getTerm(), equalTo(new Term("name", "test"))); } @Test public void testQueryStringFieldsMatch() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-fields-match.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(BooleanQuery.class)); BooleanQuery bQuery = (BooleanQuery) parsedQuery; assertThat(bQuery.clauses().size(), equalTo(2)); assertEquals(Sets.newHashSet(new Term("name.first", "test"), new Term("name.last", "test")), Sets.newHashSet(assertBooleanSubQuery(parsedQuery, TermQuery.class, 0).getTerm(), assertBooleanSubQuery(parsedQuery, TermQuery.class, 1).getTerm())); } @Test public void testQueryStringFields2Builder() throws Exception { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(queryStringQuery("test").field("content").field("name").useDisMax(true)).query(); assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); 
DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) parsedQuery; List<Query> disjuncts = disMaxQuery.getDisjuncts(); assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term("content", "test"))); assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term("name", "test"))); } @Test public void testQueryStringFields2() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-fields2.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) parsedQuery; List<Query> disjuncts = disMaxQuery.getDisjuncts(); assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term("content", "test"))); assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term("name", "test"))); } @Test public void testQueryStringFields3Builder() throws Exception { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(queryStringQuery("test").field("content", 2.2f).field("name").useDisMax(true)).query(); assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) parsedQuery; List<Query> disjuncts = disMaxQuery.getDisjuncts(); assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term("content", "test"))); assertThat((double) disjuncts.get(0).getBoost(), closeTo(2.2, 0.01)); assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term("name", "test"))); assertThat((double) disjuncts.get(1).getBoost(), closeTo(1, 0.01)); } @Test public void testQueryStringFields3() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-fields3.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, 
instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery disMaxQuery = (DisjunctionMaxQuery) parsedQuery; List<Query> disjuncts = disMaxQuery.getDisjuncts(); assertThat(((TermQuery) disjuncts.get(0)).getTerm(), equalTo(new Term("content", "test"))); assertThat((double) disjuncts.get(0).getBoost(), closeTo(2.2, 0.01)); assertThat(((TermQuery) disjuncts.get(1)).getTerm(), equalTo(new Term("name", "test"))); assertThat((double) disjuncts.get(1).getBoost(), closeTo(1, 0.01)); } @Test public void testQueryStringTimezone() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-timezone.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(TermRangeQuery.class)); try { queryParser.parse(copyToStringFromClasspath("/org/elasticsearch/index/query/query-timezone-incorrect.json")); fail("we expect a QueryParsingException as we are providing an unknown time_zome"); } catch (QueryParsingException e) { // We expect this one } } @Test public void testQueryStringRegexp() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-regexp-max-determinized-states.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(RegexpQuery.class)); RegexpQuery regexpQuery = (RegexpQuery) parsedQuery; assertTrue(regexpQuery.toString().contains("/foo*bar/")); } @Test public void testQueryStringRegexpTooManyDeterminizedStates() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-regexp-too-many-determinized-states.json"); try { queryParser.parse(query).query(); fail("did not hit exception"); } catch (QueryParsingException qpe) { // expected assertTrue(qpe.getCause() instanceof TooComplexToDeterminizeException); } } @Test public 
void testMatchAllBuilder() throws Exception {
    // Builder form: match_all with an explicit boost.
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(matchAllQuery().boost(1.2f)).query();
    assertThat(parsedQuery, instanceOf(MatchAllDocsQuery.class));
    MatchAllDocsQuery matchAllDocsQuery = (MatchAllDocsQuery) parsedQuery;
    assertThat((double) matchAllDocsQuery.getBoost(), closeTo(1.2, 0.01));
}

// JSON-fixture form of the boosted match_all test above.
@Test
public void testMatchAll() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/matchAll.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(MatchAllDocsQuery.class));
    MatchAllDocsQuery matchAllDocsQuery = (MatchAllDocsQuery) parsedQuery;
    assertThat((double) matchAllDocsQuery.getBoost(), closeTo(1.2, 0.01));
}

// An empty match_all object parses to a fresh equivalent of Queries.newMatchAllQuery().
@Test
public void testMatchAllEmpty1() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/match_all_empty1.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, equalTo(Queries.newMatchAllQuery()));
    assertThat(parsedQuery, not(sameInstance(Queries.newMatchAllQuery())));
}

@Test
public void testMatchAllEmpty2() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/match_all_empty2.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, equalTo(Queries.newMatchAllQuery()));
    assertThat(parsedQuery, not(sameInstance(Queries.newMatchAllQuery())));
}

// "*:*" in a query_string is recognised as match-all.
@Test
public void testStarColonStar() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/starColonStar.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(MatchAllDocsQuery.class));
}

// Builder form: dis_max with boost, tie-breaker and two term clauses.
@Test
public void testDisMaxBuilder() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(disMaxQuery().boost(1.2f).tieBreaker(0.7f).add(termQuery("name.first", "first")).add(termQuery("name.last", "last"))).query();
    assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class));
    DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) parsedQuery;
    assertThat((double) disjunctionMaxQuery.getBoost(), closeTo(1.2, 0.01));
    List<Query> disjuncts = disjunctionMaxQuery.getDisjuncts();
    assertThat(disjuncts.size(), equalTo(2));
    Query firstQ = disjuncts.get(0);
    assertThat(firstQ, instanceOf(TermQuery.class));
    assertThat(((TermQuery) firstQ).getTerm(), equalTo(new Term("name.first", "first")));
    Query secondsQ = disjuncts.get(1);
    assertThat(secondsQ, instanceOf(TermQuery.class));
    assertThat(((TermQuery) secondsQ).getTerm(), equalTo(new Term("name.last", "last")));
}

// JSON-fixture form of testDisMaxBuilder (disMax.json).
@Test
public void testDisMax() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/disMax.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class));
    DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) parsedQuery;
    assertThat((double) disjunctionMaxQuery.getBoost(), closeTo(1.2, 0.01));
    List<Query> disjuncts = disjunctionMaxQuery.getDisjuncts();
    assertThat(disjuncts.size(), equalTo(2));
    Query firstQ = disjuncts.get(0);
    assertThat(firstQ, instanceOf(TermQuery.class));
    assertThat(((TermQuery) firstQ).getTerm(), equalTo(new Term("name.first", "first")));
    Query secondsQ = disjuncts.get(1);
    assertThat(secondsQ, instanceOf(TermQuery.class));
    assertThat(((TermQuery) secondsQ).getTerm(), equalTo(new Term("name.last", "last")));
}

// dis_max with a single boosted prefix clause (disMax2.json).
@Test
public void testDisMax2() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/disMax2.json");
    Query
parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) parsedQuery; List<Query> disjuncts = disjunctionMaxQuery.getDisjuncts(); assertThat(disjuncts.size(), equalTo(1)); PrefixQuery firstQ = (PrefixQuery) disjuncts.get(0); // since age is automatically registered in data, we encode it as numeric assertThat(firstQ.getPrefix(), equalTo(new Term("name.first", "sh"))); assertThat((double) firstQ.getBoost(), closeTo(1.2, 0.00001)); } @Test public void testTermQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(termQuery("age", 34).buildAsBytes()).query(); TermQuery fieldQuery = unwrapTermQuery(parsedQuery, true); assertThat(fieldQuery.getTerm().bytes(), equalTo(indexedValueForSearch(34l))); } @Test public void testTermQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/term.json"); TermQuery fieldQuery = unwrapTermQuery(queryParser.parse(query).query(), true); assertThat(fieldQuery.getTerm().bytes(), equalTo(indexedValueForSearch(34l))); } private static TermQuery unwrapTermQuery(Query q, boolean expectConstantWrapper) { if (expectConstantWrapper) { assertThat(q, instanceOf(ConstantScoreQuery.class)); q = ((ConstantScoreQuery) q).getQuery(); } assertThat(q, instanceOf(TermQuery.class)); return (TermQuery) q; } @Test public void testFuzzyQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(fuzzyQuery("name.first", "sh").buildAsBytes()).query(); assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); FuzzyQuery fuzzyQuery = (FuzzyQuery) parsedQuery; assertThat(fuzzyQuery.getTerm(), equalTo(new Term("name.first", "sh"))); } @Test public void testFuzzyQuery() throws IOException { IndexQueryParserService 
queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/fuzzy.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); FuzzyQuery fuzzyQuery = (FuzzyQuery) parsedQuery; assertThat(fuzzyQuery.getTerm(), equalTo(new Term("name.first", "sh"))); } @Test public void testFuzzyQueryWithFieldsBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(fuzzyQuery("name.first", "sh").fuzziness(Fuzziness.fromSimilarity(0.1f)).prefixLength(1).boost(2.0f).buildAsBytes()).query(); assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); FuzzyQuery fuzzyQuery = (FuzzyQuery) parsedQuery; assertThat(fuzzyQuery.getTerm(), equalTo(new Term("name.first", "sh"))); assertThat(fuzzyQuery.getMaxEdits(), equalTo(FuzzyQuery.floatToEdits(0.1f, "sh".length()))); assertThat(fuzzyQuery.getPrefixLength(), equalTo(1)); assertThat(fuzzyQuery.getBoost(), equalTo(2.0f)); } @Test public void testFuzzyQueryWithFields() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/fuzzy-with-fields.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); FuzzyQuery fuzzyQuery = (FuzzyQuery) parsedQuery; assertThat(fuzzyQuery.getTerm(), equalTo(new Term("name.first", "sh"))); assertThat(fuzzyQuery.getMaxEdits(), equalTo(FuzzyQuery.floatToEdits(0.1f, "sh".length()))); assertThat(fuzzyQuery.getPrefixLength(), equalTo(1)); assertThat(fuzzyQuery.getBoost(), equalTo(2.0f)); } @Test public void testFuzzyQueryWithFields2() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/fuzzy-with-fields2.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, 
instanceOf(NumericRangeQuery.class)); NumericRangeQuery fuzzyQuery = (NumericRangeQuery) parsedQuery; assertThat(fuzzyQuery.getMin().longValue(), equalTo(7l)); assertThat(fuzzyQuery.getMax().longValue(), equalTo(17l)); } @Test public void testTermWithBoostQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(termQuery("age", 34).boost(2.0f)).query(); TermQuery fieldQuery = unwrapTermQuery(parsedQuery, true); assertThat(fieldQuery.getTerm().bytes(), equalTo(indexedValueForSearch(34l))); assertThat((double) parsedQuery.getBoost(), closeTo(2.0, 0.01)); } private BytesRef indexedValueForSearch(long value) { BytesRefBuilder bytesRef = new BytesRefBuilder(); NumericUtils.longToPrefixCoded(value, 0, bytesRef); // 0 because of // exact // match return bytesRef.get(); } @Test public void testTermWithBoostQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/term-with-boost.json"); Query parsedQuery = queryParser.parse(query).query(); TermQuery fieldQuery = unwrapTermQuery(parsedQuery, true); assertThat(fieldQuery.getTerm().bytes(), equalTo(indexedValueForSearch(34l))); assertThat((double) parsedQuery.getBoost(), closeTo(2.0, 0.01)); } @Test public void testPrefixQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(prefixQuery("name.first", "sh")).query(); assertThat(parsedQuery, instanceOf(PrefixQuery.class)); PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; // since age is automatically registered in data, we encode it as numeric assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); } @Test public void testPrefixQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/prefix.json"); Query parsedQuery = 
queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(PrefixQuery.class)); PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; // since age is automatically registered in data, we encode it as numeric assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); } @Test public void testPrefixBoostQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/prefix-boost.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(PrefixQuery.class)); PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; // since age is automatically registered in data, we encode it as numeric assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); assertThat((double) prefixQuery.getBoost(), closeTo(1.2, 0.00001)); } @Test public void testPrefiFilteredQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), prefixFilter("name.first", "sh"))).query(); assertThat(parsedQuery, instanceOf(FilteredQuery.class)); FilteredQuery filteredQuery = (FilteredQuery) parsedQuery; QueryWrapperFilter filter = (QueryWrapperFilter) filteredQuery.getFilter(); PrefixQuery prefixQuery = (PrefixQuery) filter.getQuery(); assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); } @Test public void testPrefiFilteredQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/prefix-filter.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(FilteredQuery.class)); FilteredQuery filteredQuery = (FilteredQuery) parsedQuery; QueryWrapperFilter filter = (QueryWrapperFilter) filteredQuery.getFilter(); PrefixQuery prefixQuery = (PrefixQuery) filter.getQuery(); 
assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); } @Test public void testPrefixNamedFilteredQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/prefix-filter-named.json"); ParsedQuery parsedQuery = queryParser.parse(query); assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true)); assertThat(parsedQuery.query(), instanceOf(FilteredQuery.class)); FilteredQuery filteredQuery = (FilteredQuery) parsedQuery.query(); QueryWrapperFilter filter = (QueryWrapperFilter) filteredQuery.getFilter(); PrefixQuery prefixFilter = (PrefixQuery) filter.getQuery(); assertThat(prefixFilter.getPrefix(), equalTo(new Term("name.first", "sh"))); } @Test public void testPrefixQueryBoostQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(prefixQuery("name.first", "sh").boost(2.0f)).query(); assertThat(parsedQuery, instanceOf(PrefixQuery.class)); PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); assertThat((double) prefixQuery.getBoost(), closeTo(2.0, 0.01)); } @Test public void testPrefixQueryBoostQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/prefix-with-boost.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(PrefixQuery.class)); PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; assertThat(prefixQuery.getPrefix(), equalTo(new Term("name.first", "sh"))); assertThat((double) prefixQuery.getBoost(), closeTo(2.0, 0.01)); } @Test public void testPrefixQueryWithUnknownField() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(prefixQuery("unknown", "sh")).query(); assertThat(parsedQuery, 
instanceOf(PrefixQuery.class)); PrefixQuery prefixQuery = (PrefixQuery) parsedQuery; assertThat(prefixQuery.getPrefix(), equalTo(new Term("unknown", "sh"))); assertThat(prefixQuery.getRewriteMethod(), notNullValue()); } @Test public void testRegexpQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(regexpQuery("name.first", "s.*y")).query(); assertThat(parsedQuery, instanceOf(RegexpQuery.class)); RegexpQuery regexpQuery = (RegexpQuery) parsedQuery; assertThat(regexpQuery.getField(), equalTo("name.first")); } @Test public void testRegexpQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/regexp.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(RegexpQuery.class)); RegexpQuery regexpQuery = (RegexpQuery) parsedQuery; assertThat(regexpQuery.getField(), equalTo("name.first")); } @Test public void testRegexpQueryWithMaxDeterminizedStates() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/regexp-max-determinized-states.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(RegexpQuery.class)); RegexpQuery regexpQuery = (RegexpQuery) parsedQuery; assertThat(regexpQuery.getField(), equalTo("name.first")); } @Test public void testRegexpFilteredQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/regexp-filter.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(FilteredQuery.class)); Filter filter = ((FilteredQuery) parsedQuery).getFilter(); assertThat(filter, instanceOf(QueryWrapperFilter.class)); Query q = ((QueryWrapperFilter) filter).getQuery(); assertThat(q, 
instanceOf(RegexpQuery.class)); RegexpQuery regexpQuery = (RegexpQuery) q; assertThat(regexpQuery.getField(), equalTo("name.first")); assertThat(regexpQuery.toString(), containsString("s.*y")); } @Test public void testRegexpFilteredQueryWithMaxDeterminizedStates() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/regexp-filter-max-determinized-states.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(FilteredQuery.class)); Filter filter = ((FilteredQuery) parsedQuery).getFilter(); assertThat(filter, instanceOf(QueryWrapperFilter.class)); Query q = ((QueryWrapperFilter) filter).getQuery(); assertThat(q, instanceOf(RegexpQuery.class)); RegexpQuery regexpQuery = (RegexpQuery) q; assertThat(regexpQuery.getField(), equalTo("name.first")); assertThat(regexpQuery.toString(), containsString("s.*y")); } @Test public void testNamedRegexpFilteredQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/regexp-filter-named.json"); ParsedQuery parsedQuery = queryParser.parse(query); assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true)); assertThat(parsedQuery.query(), instanceOf(FilteredQuery.class)); Filter filter = ((FilteredQuery) parsedQuery.query()).getFilter(); assertThat(filter, instanceOf(QueryWrapperFilter.class)); Query q = ((QueryWrapperFilter) filter).getQuery(); assertThat(q, instanceOf(RegexpQuery.class)); RegexpQuery regexpQuery = (RegexpQuery) q; assertThat(regexpQuery.getField(), equalTo("name.first")); assertThat(regexpQuery.toString(), containsString("s.*y")); } @Test public void testRegexpWithFlagsFilteredQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/regexp-filter-flags.json"); ParsedQuery 
parsedQuery = queryParser.parse(query); assertThat(parsedQuery.query(), instanceOf(FilteredQuery.class)); Filter filter = ((FilteredQuery) parsedQuery.query()).getFilter(); assertThat(filter, instanceOf(QueryWrapperFilter.class)); Query q = ((QueryWrapperFilter) filter).getQuery(); assertThat(q, instanceOf(RegexpQuery.class)); RegexpQuery regexpQuery = (RegexpQuery) q; assertThat(regexpQuery.toString(), equalTo("name.first:/s.*y/")); } @Test public void testNamedAndCachedRegexpWithFlagsFilteredQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/regexp-filter-flags-named-cached.json"); ParsedQuery parsedQuery = queryParser.parse(query); assertThat(parsedQuery.query(), instanceOf(FilteredQuery.class)); Filter filter = ((FilteredQuery) parsedQuery.query()).getFilter(); assertThat(filter, instanceOf(QueryWrapperFilter.class)); Query q = ((QueryWrapperFilter) filter).getQuery(); assertThat(q, instanceOf(RegexpQuery.class)); RegexpQuery regexpQuery = (RegexpQuery) q; assertThat(regexpQuery.toString(), equalTo("name.first:/s.*y/")); } @Test public void testRegexpBoostQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/regexp-boost.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(RegexpQuery.class)); RegexpQuery regexpQuery = (RegexpQuery) parsedQuery; assertThat(regexpQuery.getField(), equalTo("name.first")); assertThat(regexpQuery.getBoost(), equalTo(1.2f)); } @Test public void testWildcardQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(wildcardQuery("name.first", "sh*")).query(); assertThat(parsedQuery, instanceOf(WildcardQuery.class)); WildcardQuery wildcardQuery = (WildcardQuery) parsedQuery; assertThat(wildcardQuery.getTerm(), equalTo(new 
Term("name.first", "sh*"))); } @Test public void testWildcardQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/wildcard.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(WildcardQuery.class)); WildcardQuery wildcardQuery = (WildcardQuery) parsedQuery; assertThat(wildcardQuery.getTerm(), equalTo(new Term("name.first", "sh*"))); } @Test public void testWildcardBoostQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/wildcard-boost.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(WildcardQuery.class)); WildcardQuery wildcardQuery = (WildcardQuery) parsedQuery; assertThat(wildcardQuery.getTerm(), equalTo(new Term("name.first", "sh*"))); assertThat((double) wildcardQuery.getBoost(), closeTo(1.2, 0.00001)); } @Test public void testRangeQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(rangeQuery("age").from(23).to(54).includeLower(true).includeUpper(false)).query(); // since age is automatically registered in data, we encode it as numeric assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery; assertThat(rangeQuery.getField(), equalTo("age")); assertThat(rangeQuery.getMin().intValue(), equalTo(23)); assertThat(rangeQuery.getMax().intValue(), equalTo(54)); assertThat(rangeQuery.includesMin(), equalTo(true)); assertThat(rangeQuery.includesMax(), equalTo(false)); } @Test public void testRangeQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/range.json"); Query parsedQuery = queryParser.parse(query).query(); // since age is 
automatically registered in data, we encode it as numeric assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery; assertThat(rangeQuery.getField(), equalTo("age")); assertThat(rangeQuery.getMin().intValue(), equalTo(23)); assertThat(rangeQuery.getMax().intValue(), equalTo(54)); assertThat(rangeQuery.includesMin(), equalTo(true)); assertThat(rangeQuery.includesMax(), equalTo(false)); } @Test public void testRange2Query() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/range2.json"); Query parsedQuery = queryParser.parse(query).query(); // since age is automatically registered in data, we encode it as numeric assertThat(parsedQuery, instanceOf(NumericRangeQuery.class)); NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery; assertThat(rangeQuery.getField(), equalTo("age")); assertThat(rangeQuery.getMin().intValue(), equalTo(23)); assertThat(rangeQuery.getMax().intValue(), equalTo(54)); assertThat(rangeQuery.includesMin(), equalTo(true)); assertThat(rangeQuery.includesMax(), equalTo(false)); } @Test public void testRangeFilteredQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), rangeFilter("age").from(23).to(54).includeLower(true).includeUpper(false))).query(); FilteredQuery expected = new FilteredQuery( new TermQuery(new Term("name.first", "shay")), Queries.wrap(NumericRangeQuery.newLongRange("age", 23L, 54L, true, false))); assertEquals(expected, parsedQuery); } @Test public void testRangeFilteredQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/range-filter.json"); Query parsedQuery = queryParser.parse(query).query(); FilteredQuery expected = new FilteredQuery( new 
TermQuery(new Term("name.first", "shay")), Queries.wrap(NumericRangeQuery.newLongRange("age", 23L, 54L, true, false))); assertEquals(expected, parsedQuery); } @Test public void testRangeNamedFilteredQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/range-filter-named.json"); Query parsedQuery = queryParser.parse(query).query(); FilteredQuery expected = new FilteredQuery( new TermQuery(new Term("name.first", "shay")), Queries.wrap(NumericRangeQuery.newLongRange("age", 23L, 54L, true, false))); assertEquals(expected, parsedQuery); } @Test public void testRangeFilteredQueryBuilder_executionFieldData() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), rangeFilter("age").from(23).to(54).includeLower(true).includeUpper(false).setExecution("fielddata"))).query(); assertThat(parsedQuery, instanceOf(FilteredQuery.class)); Filter filter = ((FilteredQuery) parsedQuery).getFilter(); assertThat(filter, instanceOf(NumericRangeFieldDataFilter.class)); NumericRangeFieldDataFilter<Number> rangeFilter = (NumericRangeFieldDataFilter<Number>) filter; assertThat(rangeFilter.getField(), equalTo("age")); assertThat(rangeFilter.getLowerVal().intValue(), equalTo(23)); assertThat(rangeFilter.getUpperVal().intValue(), equalTo(54)); assertThat(rangeFilter.isIncludeLower(), equalTo(true)); assertThat(rangeFilter.isIncludeUpper(), equalTo(false)); } @Test public void testBoolFilteredQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), boolFilter().must(termFilter("name.first", "shay1"), termFilter("name.first", "shay4")).mustNot(termFilter("name.first", "shay2")).should(termFilter("name.first", "shay3")))).query(); BooleanQuery filter = new BooleanQuery(); 
filter.add(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))), Occur.MUST); filter.add(Queries.wrap(new TermQuery(new Term("name.first", "shay4"))), Occur.MUST); filter.add(Queries.wrap(new TermQuery(new Term("name.first", "shay2"))), Occur.MUST_NOT); filter.add(Queries.wrap(new TermQuery(new Term("name.first", "shay3"))), Occur.SHOULD); filter.setMinimumNumberShouldMatch(1); FilteredQuery expected = new FilteredQuery( new TermQuery(new Term("name.first", "shay")), Queries.wrap(filter)); assertEquals(expected, parsedQuery); } @Test public void testBoolFilteredQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/bool-filter.json"); Query parsedQuery = queryParser.parse(query).query(); BooleanQuery filter = new BooleanQuery(); filter.add(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))), Occur.MUST); filter.add(Queries.wrap(new TermQuery(new Term("name.first", "shay4"))), Occur.MUST); filter.add(Queries.wrap(new TermQuery(new Term("name.first", "shay2"))), Occur.MUST_NOT); filter.add(Queries.wrap(new TermQuery(new Term("name.first", "shay3"))), Occur.SHOULD); filter.setMinimumNumberShouldMatch(1); FilteredQuery expected = new FilteredQuery( new TermQuery(new Term("name.first", "shay")), Queries.wrap(filter)); assertEquals(expected, parsedQuery); } @Test public void testAndFilteredQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(filteredQuery(matchAllQuery(), andFilter(termFilter("name.first", "shay1"), termFilter("name.first", "shay4")))).query(); BooleanQuery and = new BooleanQuery(); and.add(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))), Occur.MUST); and.add(Queries.wrap(new TermQuery(new Term("name.first", "shay4"))), Occur.MUST); ConstantScoreQuery expected = new ConstantScoreQuery(Queries.wrap(and)); assertEquals(expected, parsedQuery); } @Test 
public void testAndFilteredQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter.json"); Query parsedQuery = queryParser.parse(query).query(); BooleanQuery and = new BooleanQuery(); and.add(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))), Occur.MUST); and.add(Queries.wrap(new TermQuery(new Term("name.first", "shay4"))), Occur.MUST); FilteredQuery expected = new FilteredQuery( new TermQuery(new Term("name.first", "shay")), Queries.wrap(and)); assertEquals(expected, parsedQuery); } @Test public void testAndNamedFilteredQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter-named.json"); Query parsedQuery = queryParser.parse(query).query(); BooleanQuery and = new BooleanQuery(); and.add(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))), Occur.MUST); and.add(Queries.wrap(new TermQuery(new Term("name.first", "shay4"))), Occur.MUST); FilteredQuery expected = new FilteredQuery( new TermQuery(new Term("name.first", "shay")), Queries.wrap(and)); assertEquals(expected, parsedQuery); } @Test public void testAndFilteredQuery2() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter2.json"); Query parsedQuery = queryParser.parse(query).query(); BooleanQuery and = new BooleanQuery(); and.add(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))), Occur.MUST); and.add(Queries.wrap(new TermQuery(new Term("name.first", "shay4"))), Occur.MUST); FilteredQuery expected = new FilteredQuery( new TermQuery(new Term("name.first", "shay")), Queries.wrap(and)); assertEquals(expected, parsedQuery); } @Test public void testOrFilteredQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = 
queryParser.parse(filteredQuery(matchAllQuery(), orFilter(termFilter("name.first", "shay1"), termFilter("name.first", "shay4")))).query();
BooleanQuery or = new BooleanQuery();
or.add(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))), Occur.SHOULD);
or.add(Queries.wrap(new TermQuery(new Term("name.first", "shay4"))), Occur.SHOULD);
ConstantScoreQuery expected = new ConstantScoreQuery(Queries.wrap(or));
assertEquals(expected, parsedQuery);
}

// JSON-fixture form (or-filter.json): "or" filter combined with a real term query.
@Test
public void testOrFilteredQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/or-filter.json");
    Query parsedQuery = queryParser.parse(query).query();
    BooleanQuery or = new BooleanQuery();
    or.add(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))), Occur.SHOULD);
    or.add(Queries.wrap(new TermQuery(new Term("name.first", "shay4"))), Occur.SHOULD);
    FilteredQuery expected = new FilteredQuery(
            new TermQuery(new Term("name.first", "shay")),
            Queries.wrap(or));
    assertEquals(expected, parsedQuery);
}

// Alternate "or" filter syntax (or-filter2.json) parses to the same query shape.
@Test
public void testOrFilteredQuery2() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/or-filter2.json");
    Query parsedQuery = queryParser.parse(query).query();
    BooleanQuery or = new BooleanQuery();
    or.add(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))), Occur.SHOULD);
    or.add(Queries.wrap(new TermQuery(new Term("name.first", "shay4"))), Occur.SHOULD);
    FilteredQuery expected = new FilteredQuery(
            new TermQuery(new Term("name.first", "shay")),
            Queries.wrap(or));
    assertEquals(expected, parsedQuery);
}

// "not" filter over match_all becomes a constant-score over the negated term.
@Test
public void testNotFilteredQueryBuilder() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(filteredQuery(matchAllQuery(), notFilter(termFilter("name.first", "shay1")))).query();
    ConstantScoreQuery expected = new
    ConstantScoreQuery(Queries.wrap(Queries.not(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))))));
    assertEquals(expected, parsedQuery);
}

// JSON-fixture form (not-filter.json): "not" filter combined with a real term query.
@Test
public void testNotFilteredQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/not-filter.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(FilteredQuery.class));
    FilteredQuery expected = new FilteredQuery(
            new TermQuery(new Term("name.first", "shay")),
            Queries.wrap(Queries.not(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))))));
    assertEquals(expected, parsedQuery);
}

// Alternate "not" filter syntax (not-filter2.json).
@Test
public void testNotFilteredQuery2() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/not-filter2.json");
    Query parsedQuery = queryParser.parse(query).query();
    FilteredQuery expected = new FilteredQuery(
            new TermQuery(new Term("name.first", "shay")),
            Queries.wrap(Queries.not(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))))));
    assertEquals(expected, parsedQuery);
}

// Third "not" filter syntax variant (not-filter3.json).
@Test
public void testNotFilteredQuery3() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/not-filter3.json");
    Query parsedQuery = queryParser.parse(query).query();
    FilteredQuery expected = new FilteredQuery(
            new TermQuery(new Term("name.first", "shay")),
            Queries.wrap(Queries.not(Queries.wrap(new TermQuery(new Term("name.first", "shay1"))))));
    assertEquals(expected, parsedQuery);
}

// Builder form: boosting query with positive/negative clauses and a negative boost.
@Test
public void testBoostingQueryBuilder() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(boostingQuery().positive(termQuery("field1", "value1")).negative(termQuery("field1", "value2")).negativeBoost(0.2f)).query();
    assertThat(parsedQuery, instanceOf(BoostingQuery.class));
}
// Parses boosting-query.json into a BoostingQuery.
@Test
public void testBoostingQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/boosting-query.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(BoostingQuery.class));
}

// A fuzzy query_string on a numeric field is rewritten to a numeric range (query2.json).
@Test
public void testQueryStringFuzzyNumeric() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query2.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
    NumericRangeQuery fuzzyQuery = (NumericRangeQuery) parsedQuery;
    assertThat(fuzzyQuery.getMin().longValue(), equalTo(12l));
    assertThat(fuzzyQuery.getMax().longValue(), equalTo(12l));
}

// Builder API: bool query with must/must_not/should clauses kept in declaration order.
@Test
public void testBoolQueryBuilder() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(boolQuery().must(termQuery("content", "test1")).must(termQuery("content", "test4")).mustNot(termQuery("content", "test2")).should(termQuery("content", "test3"))).query();
    assertThat(parsedQuery, instanceOf(BooleanQuery.class));
    BooleanQuery booleanQuery = (BooleanQuery) parsedQuery;
    BooleanClause[] clauses = booleanQuery.getClauses();
    assertThat(clauses.length, equalTo(4));
    assertThat(((TermQuery) clauses[0].getQuery()).getTerm(), equalTo(new Term("content", "test1")));
    assertThat(clauses[0].getOccur(), equalTo(BooleanClause.Occur.MUST));
    assertThat(((TermQuery) clauses[1].getQuery()).getTerm(), equalTo(new Term("content", "test4")));
    assertThat(clauses[1].getOccur(), equalTo(BooleanClause.Occur.MUST));
    assertThat(((TermQuery) clauses[2].getQuery()).getTerm(), equalTo(new Term("content", "test2")));
    assertThat(clauses[2].getOccur(), equalTo(BooleanClause.Occur.MUST_NOT));
    assertThat(((TermQuery) clauses[3].getQuery()).getTerm(), equalTo(new Term("content", "test3")));
    assertThat(clauses[3].getOccur(), equalTo(BooleanClause.Occur.SHOULD));
}

// JSON form of the same bool query (bool.json), identical clause expectations.
@Test
public void testBoolQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/bool.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(BooleanQuery.class));
    BooleanQuery booleanQuery = (BooleanQuery) parsedQuery;
    BooleanClause[] clauses = booleanQuery.getClauses();
    assertThat(clauses.length, equalTo(4));
    assertThat(((TermQuery) clauses[0].getQuery()).getTerm(), equalTo(new Term("content", "test1")));
    assertThat(clauses[0].getOccur(), equalTo(BooleanClause.Occur.MUST));
    assertThat(((TermQuery) clauses[1].getQuery()).getTerm(), equalTo(new Term("content", "test4")));
    assertThat(clauses[1].getOccur(), equalTo(BooleanClause.Occur.MUST));
    assertThat(((TermQuery) clauses[2].getQuery()).getTerm(), equalTo(new Term("content", "test2")));
    assertThat(clauses[2].getOccur(), equalTo(BooleanClause.Occur.MUST_NOT));
    assertThat(((TermQuery) clauses[3].getQuery()).getTerm(), equalTo(new Term("content", "test3")));
    assertThat(clauses[3].getOccur(), equalTo(BooleanClause.Occur.SHOULD));
}

// Builder API: terms query expands to a SHOULD-only boolean query.
@Test
public void testTermsQueryBuilder() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(termsQuery("name.first", Lists.newArrayList("shay", "test"))).query();
    assertThat(parsedQuery, instanceOf(BooleanQuery.class));
    BooleanQuery booleanQuery = (BooleanQuery) parsedQuery;
    BooleanClause[] clauses = booleanQuery.getClauses();
    assertThat(clauses.length, equalTo(2));
    assertThat(((TermQuery) clauses[0].getQuery()).getTerm(), equalTo(new Term("name.first", "shay")));
    assertThat(clauses[0].getOccur(), equalTo(BooleanClause.Occur.SHOULD));
    assertThat(((TermQuery) clauses[1].getQuery()).getTerm(), equalTo(new Term("name.first", "test")));
    assertThat(clauses[1].getOccur(), equalTo(BooleanClause.Occur.SHOULD));
}

// (signature continues on the next original line with "void testTermsQuery() ...")
@Test public
// (continuation of "@Test public" from the previous original line)
// JSON form (terms-query.json): terms query expands to two SHOULD term clauses.
void testTermsQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/terms-query.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(BooleanQuery.class));
    BooleanQuery booleanQuery = (BooleanQuery) parsedQuery;
    BooleanClause[] clauses = booleanQuery.getClauses();
    assertThat(clauses.length, equalTo(2));
    assertThat(((TermQuery) clauses[0].getQuery()).getTerm(), equalTo(new Term("name.first", "shay")));
    assertThat(clauses[0].getOccur(), equalTo(BooleanClause.Occur.SHOULD));
    assertThat(((TermQuery) clauses[1].getQuery()).getTerm(), equalTo(new Term("name.first", "test")));
    assertThat(clauses[1].getOccur(), equalTo(BooleanClause.Occur.SHOULD));
}

// A terms *query* with two fields must be rejected with a parse error.
@Test
public void testTermsQueryWithMultipleFields() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = XContentFactory.jsonBuilder().startObject()
            .startObject("terms").array("foo", 123).array("bar", 456).endObject()
            .endObject().string();
    try {
        queryParser.parse(query).query();
        fail();
    } catch (QueryParsingException ex) {
        assertThat(ex.getMessage(), equalTo("[test] [terms] query does not support multiple fields"));
    }
}

// A terms *filter* with two fields must likewise be rejected.
@Test
public void testTermsFilterWithMultipleFields() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = XContentFactory.jsonBuilder().startObject()
            .startObject("filtered")
            .startObject("query").startObject("match_all").endObject().endObject()
            .startObject("filter").startObject("terms").array("foo", 123).array("bar", 456).endObject().endObject()
            .endObject().string();
    try {
        queryParser.parse(query).query();
        fail();
    } catch (QueryParsingException ex) {
        assertThat(ex.getMessage(), equalTo("[test] [terms] filter does not support multiple fields"));
    }
}

// Builder API: a three-value terms ("in"-style) query expands to three SHOULD clauses.
@Test
public void testInQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(termsQuery("name.first", Lists.newArrayList("test1", "test2", "test3"))).query();
    assertThat(parsedQuery, instanceOf(BooleanQuery.class));
    BooleanQuery booleanQuery = (BooleanQuery) parsedQuery;
    BooleanClause[] clauses = booleanQuery.getClauses();
    assertThat(clauses.length, equalTo(3));
    assertThat(((TermQuery) clauses[0].getQuery()).getTerm(), equalTo(new Term("name.first", "test1")));
    assertThat(clauses[0].getOccur(), equalTo(BooleanClause.Occur.SHOULD));
    assertThat(((TermQuery) clauses[1].getQuery()).getTerm(), equalTo(new Term("name.first", "test2")));
    assertThat(clauses[1].getOccur(), equalTo(BooleanClause.Occur.SHOULD));
    assertThat(((TermQuery) clauses[2].getQuery()).getTerm(), equalTo(new Term("name.first", "test3")));
    assertThat(clauses[2].getOccur(), equalTo(BooleanClause.Occur.SHOULD));
}

// Builder API: filtered query (term query + term filter).
@Test
public void testFilteredQueryBuilder() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), termFilter("name.last", "banon"))).query();
    assertThat(parsedQuery, instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
    assertThat(((TermQuery) filteredQuery.getQuery()).getTerm(), equalTo(new Term("name.first", "shay")));
    assertThat(getTerm(filteredQuery.getFilter()), equalTo(new Term("name.last", "banon")));
}

// JSON form (filtered-query.json), same expectations.
@Test
public void testFilteredQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/filtered-query.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
    assertThat(((TermQuery) filteredQuery.getQuery()).getTerm(), equalTo(new Term("name.first", "shay")));
    assertThat(getTerm(filteredQuery.getFilter()), equalTo(new Term("name.last", "banon")));
}

// (signature continues on the next original line with "throws IOException ...")
@Test public void testFilteredQuery2()
// (continuation of "@Test public void testFilteredQuery2()" from the previous original line)
// filtered-query2.json variant, same expectations as testFilteredQuery.
throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/filtered-query2.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
    assertThat(((TermQuery) filteredQuery.getQuery()).getTerm(), equalTo(new Term("name.first", "shay")));
    assertThat(getTerm(filteredQuery.getFilter()), equalTo(new Term("name.last", "banon")));
}

// filtered-query3.json: the filter side is a numeric range on "age".
@Test
public void testFilteredQuery3() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/filtered-query3.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
    assertThat(((TermQuery) filteredQuery.getQuery()).getTerm(), equalTo(new Term("name.first", "shay")));
    QueryWrapperFilter filter = (QueryWrapperFilter) filteredQuery.getFilter();
    NumericRangeQuery<?> rangeQuery = (NumericRangeQuery<?>) filter.getQuery();
    assertThat(rangeQuery.getField(), equalTo("age"));
    assertThat(rangeQuery.getMin().intValue(), equalTo(23));
    assertThat(rangeQuery.getMax().intValue(), equalTo(54));
}

// filtered-query4.json: a boosted wildcard query filtered by a term filter.
@Test
public void testFilteredQuery4() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/filtered-query4.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
    WildcardQuery wildcardQuery = (WildcardQuery) filteredQuery.getQuery();
    assertThat(wildcardQuery.getTerm(), equalTo(new Term("name.first", "sh*")));
    assertThat((double) wildcardQuery.getBoost(), closeTo(1.1, 0.001));
    assertThat(getTerm(filteredQuery.getFilter()), equalTo(new Term("name.last", "banon")));
}

// term-filter.json: the filter side must unwrap to a plain TermQuery.
@Test
public void testTermFilterQuery() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/term-filter.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
    QueryWrapperFilter filter = (QueryWrapperFilter) filteredQuery.getFilter();
    TermQuery termFilter = (TermQuery) filter.getQuery();
    assertThat(termFilter.getTerm().field(), equalTo("name.last"));
    assertThat(termFilter.getTerm().text(), equalTo("banon"));
}

// term-filter-named.json: a named filter must be registered under the name "test".
@Test
public void testTermNamedFilterQuery() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/term-filter-named.json");
    ParsedQuery parsedQuery = queryParser.parse(query);
    assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true));
    assertThat(parsedQuery.query(), instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery.query();
    QueryWrapperFilter filter = (QueryWrapperFilter) filteredQuery.getFilter();
    TermQuery termFilter = (TermQuery) filter.getQuery();
    assertThat(termFilter.getTerm().field(), equalTo("name.last"));
    assertThat(termFilter.getTerm().text(), equalTo("banon"));
}

// Builder API: a multi-value terms filter becomes a TermsQuery.
@Test
public void testTermsFilterQueryBuilder() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), termsFilter("name.last", "banon", "kimchy"))).query();
    assertThat(parsedQuery, instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
    QueryWrapperFilter filter = (QueryWrapperFilter) filteredQuery.getFilter();
    assertThat(filter.getQuery(), instanceOf(TermsQuery.class));
}

// JSON form (terms-filter.json).
@Test
public void testTermsFilterQuery() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/terms-filter.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
    QueryWrapperFilter filter = (QueryWrapperFilter) filteredQuery.getFilter();
    assertThat(filter.getQuery(), instanceOf(TermsQuery.class));
}

// terms-filter-named.json: named terms filter registered under "test".
@Test
public void testTermsWithNameFilterQuery() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/terms-filter-named.json");
    ParsedQuery parsedQuery = queryParser.parse(query);
    assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true));
    assertThat(parsedQuery.query(), instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery.query();
    QueryWrapperFilter filter = (QueryWrapperFilter) filteredQuery.getFilter();
    assertThat(filter.getQuery(), instanceOf(TermsQuery.class));
}

// Builder API: constant_score over a term filter.
@Test
public void testConstantScoreQueryBuilder() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(constantScoreQuery(termFilter("name.last", "banon"))).query();
    assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class));
    ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery;
    assertThat(getTerm(constantScoreQuery.getQuery()), equalTo(new Term("name.last", "banon")));
}

// JSON form (constantScore-query.json); the final assertion's last argument
// continues on the next original line.
@Test
public void testConstantScoreQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/constantScore-query.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class));
    ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery;
    assertThat(getTerm(constantScoreQuery.getQuery()), equalTo(new Term("name.last",
// (continuation: closes the constantScore-query assertion started on the previous original line)
"banon")));
}

// Builder API: function_score with a boost-factor function over a term query.
@Test
public void testCustomBoostFactorQueryBuilder_withFunctionScore() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(functionScoreQuery(termQuery("name.last", "banon"), factorFunction(1.3f))).query();
    assertThat(parsedQuery, instanceOf(FunctionScoreQuery.class));
    FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) parsedQuery;
    assertThat(((TermQuery) functionScoreQuery.getSubQuery()).getTerm(), equalTo(new Term("name.last", "banon")));
    assertThat((double) ((BoostScoreFunction) functionScoreQuery.getFunction()).getBoost(), closeTo(1.3, 0.001));
}

// function_score with no explicit query defaults to match_all.
@Test
public void testCustomBoostFactorQueryBuilder_withFunctionScoreWithoutQueryGiven() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(functionScoreQuery(factorFunction(1.3f))).query();
    assertThat(parsedQuery, instanceOf(FunctionScoreQuery.class));
    FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) parsedQuery;
    assertThat(functionScoreQuery.getSubQuery() instanceof MatchAllDocsQuery, equalTo(true));
    assertThat((double) ((BoostScoreFunction) functionScoreQuery.getFunction()).getBoost(), closeTo(1.3, 0.001));
}

// Builder API: span_term on the numeric "age" field.
@Test
public void testSpanTermQueryBuilder() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(spanTermQuery("age", 34)).query();
    assertThat(parsedQuery, instanceOf(SpanTermQuery.class));
    SpanTermQuery termQuery = (SpanTermQuery) parsedQuery;
    // since age is automatically registered in data, we encode it as numeric
    assertThat(termQuery.getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
}

// JSON form (spanTerm.json), same expectation.
@Test
public void testSpanTermQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanTerm.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanTermQuery.class));
    SpanTermQuery termQuery = (SpanTermQuery) parsedQuery;
    // since age is automatically registered in data, we encode it as numeric
    assertThat(termQuery.getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
}

// Builder API: span_not with include/exclude span terms.
@Test
public void testSpanNotQueryBuilder() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(spanNotQuery().include(spanTermQuery("age", 34)).exclude(spanTermQuery("age", 35))).query();
    assertThat(parsedQuery, instanceOf(SpanNotQuery.class));
    SpanNotQuery spanNotQuery = (SpanNotQuery) parsedQuery;
    // since age is automatically registered in data, we encode it as numeric
    assertThat(((SpanTermQuery) spanNotQuery.getInclude()).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
    assertThat(((SpanTermQuery) spanNotQuery.getExclude()).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0))));
}

// JSON form (spanNot.json).
@Test
public void testSpanNotQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanNot.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanNotQuery.class));
    SpanNotQuery spanNotQuery = (SpanNotQuery) parsedQuery;
    // since age is automatically registered in data, we encode it as numeric
    assertThat(((SpanTermQuery) spanNotQuery.getInclude()).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
    assertThat(((SpanTermQuery) spanNotQuery.getExclude()).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0))));
}

// Builder API: span_first with end position 12.
@Test
public void testSpanFirstQueryBuilder() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(spanFirstQuery(spanTermQuery("age", 34), 12)).query();
    assertThat(parsedQuery, instanceOf(SpanFirstQuery.class));
    SpanFirstQuery spanFirstQuery = (SpanFirstQuery) parsedQuery;
    // since age is automatically registered in data, we encode it as numeric
    assertThat(((SpanTermQuery) spanFirstQuery.getMatch()).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
    assertThat(spanFirstQuery.getEnd(), equalTo(12));
}

// JSON form (spanFirst.json).
@Test
public void testSpanFirstQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanFirst.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanFirstQuery.class));
    SpanFirstQuery spanFirstQuery = (SpanFirstQuery) parsedQuery;
    // since age is automatically registered in data, we encode it as numeric
    assertThat(((SpanTermQuery) spanFirstQuery.getMatch()).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
    assertThat(spanFirstQuery.getEnd(), equalTo(12));
}

// Builder API: span_near with three clauses, slop 12, unordered, no payload collection.
@Test
public void testSpanNearQueryBuilder() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(spanNearQuery().clause(spanTermQuery("age", 34)).clause(spanTermQuery("age", 35)).clause(spanTermQuery("age", 36)).slop(12).inOrder(false).collectPayloads(false)).query();
    assertThat(parsedQuery, instanceOf(SpanNearQuery.class));
    SpanNearQuery spanNearQuery = (SpanNearQuery) parsedQuery;
    assertThat(spanNearQuery.getClauses().length, equalTo(3));
    assertThat(((SpanTermQuery) spanNearQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
    assertThat(((SpanTermQuery) spanNearQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0))));
    assertThat(((SpanTermQuery) spanNearQuery.getClauses()[2]).getTerm(), equalTo(new Term("age", longToPrefixCoded(36, 0))));
    assertThat(spanNearQuery.isInOrder(), equalTo(false));
}

// JSON form (spanNear.json); the parse call continues on the next original line.
@Test
public void testSpanNearQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanNear.json");
    Query parsedQuery =
// (continuation: completes the "Query parsedQuery =" statement from the previous original line)
queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanNearQuery.class));
    SpanNearQuery spanNearQuery = (SpanNearQuery) parsedQuery;
    assertThat(spanNearQuery.getClauses().length, equalTo(3));
    assertThat(((SpanTermQuery) spanNearQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
    assertThat(((SpanTermQuery) spanNearQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0))));
    assertThat(((SpanTermQuery) spanNearQuery.getClauses()[2]).getTerm(), equalTo(new Term("age", longToPrefixCoded(36, 0))));
    assertThat(spanNearQuery.isInOrder(), equalTo(false));
}

// spanFieldMaskingTerm.json: third clause is a field_masking_span over "age_1".
@Test
public void testFieldMaskingSpanQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanFieldMaskingTerm.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanNearQuery.class));
    SpanNearQuery spanNearQuery = (SpanNearQuery) parsedQuery;
    assertThat(spanNearQuery.getClauses().length, equalTo(3));
    assertThat(((SpanTermQuery) spanNearQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
    assertThat(((SpanTermQuery) spanNearQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0))));
    assertThat(((SpanTermQuery) ((FieldMaskingSpanQuery) spanNearQuery.getClauses()[2]).getMaskedQuery()).getTerm(), equalTo(new Term("age_1", "36")));
    assertThat(spanNearQuery.isInOrder(), equalTo(false));
}

// Builder API: span_or with three span_term clauses.
@Test
public void testSpanOrQueryBuilder() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(spanOrQuery().clause(spanTermQuery("age", 34)).clause(spanTermQuery("age", 35)).clause(spanTermQuery("age", 36))).query();
    assertThat(parsedQuery, instanceOf(SpanOrQuery.class));
    SpanOrQuery spanOrQuery = (SpanOrQuery) parsedQuery;
    assertThat(spanOrQuery.getClauses().length, equalTo(3));
    assertThat(((SpanTermQuery) spanOrQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
    assertThat(((SpanTermQuery) spanOrQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0))));
    assertThat(((SpanTermQuery) spanOrQuery.getClauses()[2]).getTerm(), equalTo(new Term("age", longToPrefixCoded(36, 0))));
}

// JSON form (spanOr.json).
@Test
public void testSpanOrQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanOr.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanOrQuery.class));
    SpanOrQuery spanOrQuery = (SpanOrQuery) parsedQuery;
    assertThat(spanOrQuery.getClauses().length, equalTo(3));
    assertThat(((SpanTermQuery) spanOrQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
    assertThat(((SpanTermQuery) spanOrQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0))));
    assertThat(((SpanTermQuery) spanOrQuery.getClauses()[2]).getTerm(), equalTo(new Term("age", longToPrefixCoded(36, 0))));
}

// Alternate JSON (spanOr2.json), same expectations.
@Test
public void testSpanOrQuery2() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/spanOr2.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanOrQuery.class));
    SpanOrQuery spanOrQuery = (SpanOrQuery) parsedQuery;
    assertThat(spanOrQuery.getClauses().length, equalTo(3));
    assertThat(((SpanTermQuery) spanOrQuery.getClauses()[0]).getTerm(), equalTo(new Term("age", longToPrefixCoded(34, 0))));
    assertThat(((SpanTermQuery) spanOrQuery.getClauses()[1]).getTerm(), equalTo(new Term("age", longToPrefixCoded(35, 0))));
    assertThat(((SpanTermQuery) spanOrQuery.getClauses()[2]).getTerm(), equalTo(new Term("age", longToPrefixCoded(36, 0))));
}

// span_multi wrapping a wildcard query (span-multi-term-wildcard.json).
@Test
public void testSpanMultiTermWildcardQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-wildcard.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class));
    WildcardQuery expectedWrapped = new WildcardQuery(new Term("user", "ki*y"));
    expectedWrapped.setBoost(1.08f);
    SpanMultiTermQueryWrapper<MultiTermQuery> wrapper = (SpanMultiTermQueryWrapper<MultiTermQuery>) parsedQuery;
    assertThat(wrapper, equalTo(new SpanMultiTermQueryWrapper<MultiTermQuery>(expectedWrapped)));
}

// span_multi wrapping a prefix query (span-multi-term-prefix.json).
@Test
public void testSpanMultiTermPrefixQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-prefix.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class));
    PrefixQuery expectedWrapped = new PrefixQuery(new Term("user", "ki"));
    expectedWrapped.setBoost(1.08f);
    SpanMultiTermQueryWrapper<MultiTermQuery> wrapper = (SpanMultiTermQueryWrapper<MultiTermQuery>) parsedQuery;
    assertThat(wrapper, equalTo(new SpanMultiTermQueryWrapper<MultiTermQuery>(expectedWrapped)));
}

// span_multi wrapping a fuzzy term query (span-multi-term-fuzzy-term.json); only the field is asserted.
@Test
public void testSpanMultiTermFuzzyTermQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-fuzzy-term.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class));
    SpanMultiTermQueryWrapper<MultiTermQuery> wrapper = (SpanMultiTermQueryWrapper<MultiTermQuery>) parsedQuery;
    assertThat(wrapper.getField(), equalTo("user"));
}

// span_multi wrapping a fuzzy query on a numeric field, rewritten to a long range.
@Test
public void testSpanMultiTermFuzzyRangeQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-fuzzy-range.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class));
    NumericRangeQuery<Long> expectedWrapped = NumericRangeQuery.newLongRange("age", NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, 7l, 17l, true, true);
    expectedWrapped.setBoost(2.0f);
    SpanMultiTermQueryWrapper<MultiTermQuery> wrapper = (SpanMultiTermQueryWrapper<MultiTermQuery>) parsedQuery;
    assertThat(wrapper, equalTo(new SpanMultiTermQueryWrapper<MultiTermQuery>(expectedWrapped)));
}

// span_multi wrapping a numeric range (span-multi-term-range-numeric.json).
@Test
public void testSpanMultiTermNumericRangeQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-range-numeric.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class));
    NumericRangeQuery<Long> expectedWrapped = NumericRangeQuery.newLongRange("age", NumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, 10l, 20l, true, false);
    expectedWrapped.setBoost(2.0f);
    SpanMultiTermQueryWrapper<MultiTermQuery> wrapper = (SpanMultiTermQueryWrapper<MultiTermQuery>) parsedQuery;
    assertThat(wrapper, equalTo(new SpanMultiTermQueryWrapper<MultiTermQuery>(expectedWrapped)));
}

// span_multi wrapping a term range (span-multi-term-range-term.json);
// the final assertion's argument continues on the next original line.
@Test
public void testSpanMultiTermTermRangeQuery() throws IOException {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/span-multi-term-range-term.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(SpanMultiTermQueryWrapper.class));
    TermRangeQuery expectedWrapped = TermRangeQuery.newStringRange("user", "alice", "bob", true, false);
    expectedWrapped.setBoost(2.0f);
    SpanMultiTermQueryWrapper<MultiTermQuery> wrapper = (SpanMultiTermQueryWrapper<MultiTermQuery>) parsedQuery;
    assertThat(wrapper, equalTo(new
// (continuation: closes the assertion started on the previous original line)
SpanMultiTermQueryWrapper<MultiTermQuery>(expectedWrapped)));
}

// Builder API: query filter; the wrapped query is extracted via reflection because
// QueryWrapperFilter does not expose it directly.
@Test
public void testQueryFilterBuilder() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), queryFilter(termQuery("name.last", "banon")))).query();
    assertThat(parsedQuery, instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
    QueryWrapperFilter queryWrapperFilter = (QueryWrapperFilter) filteredQuery.getFilter();
    Field field = QueryWrapperFilter.class.getDeclaredField("query");
    field.setAccessible(true);
    Query wrappedQuery = (Query) field.get(queryWrapperFilter);
    assertThat(wrappedQuery, instanceOf(TermQuery.class));
    assertThat(((TermQuery) wrappedQuery).getTerm(), equalTo(new Term("name.last", "banon")));
}

// JSON form (query-filter.json), same reflection-based check.
@Test
public void testQueryFilter() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/query-filter.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
    QueryWrapperFilter queryWrapperFilter = (QueryWrapperFilter) filteredQuery.getFilter();
    Field field = QueryWrapperFilter.class.getDeclaredField("query");
    field.setAccessible(true);
    Query wrappedQuery = (Query) field.get(queryWrapperFilter);
    assertThat(wrappedQuery, instanceOf(TermQuery.class));
    assertThat(((TermQuery) wrappedQuery).getTerm(), equalTo(new Term("name.last", "banon")));
}

// Named fquery filter (fquery-filter.json); must register under "test".
@Test
public void testFQueryFilter() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/fquery-filter.json");
    ParsedQuery parsedQuery = queryParser.parse(query);
    assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true));
    assertThat(parsedQuery.query(), instanceOf(FilteredQuery.class));
    FilteredQuery filteredQuery = (FilteredQuery) parsedQuery.query();
    QueryWrapperFilter queryWrapperFilter = (QueryWrapperFilter) filteredQuery.getFilter();
    Field field = QueryWrapperFilter.class.getDeclaredField("query");
    field.setAccessible(true);
    Query wrappedQuery = (Query) field.get(queryWrapperFilter);
    assertThat(wrappedQuery, instanceOf(TermQuery.class));
    assertThat(((TermQuery) wrappedQuery).getTerm(), equalTo(new Term("name.last", "banon")));
}

// Builder API: more_like_this over two fields.
@Test
public void testMoreLikeThisBuilder() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    Query parsedQuery = queryParser.parse(moreLikeThisQuery("name.first", "name.last").likeText("something").minTermFreq(1).maxQueryTerms(12)).query();
    assertThat(parsedQuery, instanceOf(MoreLikeThisQuery.class));
    MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) parsedQuery;
    assertThat(mltQuery.getMoreLikeFields()[0], equalTo("name.first"));
    assertThat(mltQuery.getLikeText(), equalTo("something"));
    assertThat(mltQuery.getMinTermFrequency(), equalTo(1));
    assertThat(mltQuery.getMaxQueryTerms(), equalTo(12));
}

// JSON form (mlt.json); also checks the second field.
@Test
public void testMoreLikeThis() throws Exception {
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/mlt.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(MoreLikeThisQuery.class));
    MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) parsedQuery;
    assertThat(mltQuery.getMoreLikeFields()[0], equalTo("name.first"));
    assertThat(mltQuery.getMoreLikeFields()[1], equalTo("name.last"));
    assertThat(mltQuery.getLikeText(), equalTo("something"));
    assertThat(mltQuery.getMinTermFrequency(), equalTo(1));
    assertThat(mltQuery.getMaxQueryTerms(), equalTo(12));
}

// mlt-items.json with a mocked fetch service: each item becomes a per-document Fields entry.
@Test
public void testMoreLikeThisIds() throws Exception {
    MoreLikeThisQueryParser parser = (MoreLikeThisQueryParser) queryParser.queryParser("more_like_this");
    parser.setFetchService(new MockMoreLikeThisFetchService());
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/mlt-items.json");
    Query parsedQuery = queryParser.parse(query).query();
    assertThat(parsedQuery, instanceOf(BooleanQuery.class));
    BooleanQuery booleanQuery = (BooleanQuery) parsedQuery;
    assertThat(booleanQuery.getClauses().length, is(1));
    BooleanClause itemClause = booleanQuery.getClauses()[0];
    assertThat(itemClause.getOccur(), is(BooleanClause.Occur.SHOULD));
    assertThat(itemClause.getQuery(), instanceOf(MoreLikeThisQuery.class));
    MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) itemClause.getQuery();
    // check each Fields is for each item
    for (int id = 1; id <= 4; id++) {
        Fields fields = mltQuery.getLikeFields()[id - 1];
        assertThat(termsToString(fields.terms("name.first")), is(String.valueOf(id)));
        assertThat(termsToString(fields.terms("name.last")), is(String.valueOf(id)));
    }
}

// minimum_should_match behavior of more_like_this after Lucene rewrite;
// the rewrite call continues on the next original line.
@Test
public void testMLTPercentTermsToMatch() throws Exception {
    // setup for mocking fetching items
    MoreLikeThisQueryParser parser = (MoreLikeThisQueryParser) queryParser.queryParser("more_like_this");
    parser.setFetchService(new MockMoreLikeThisFetchService());
    // parsing the ES query
    IndexQueryParserService queryParser = queryParser();
    String query = copyToStringFromClasspath("/org/elasticsearch/index/query/mlt-items.json");
    BooleanQuery parsedQuery = (BooleanQuery) queryParser.parse(query).query();
    // get MLT query, other clause is for include/exclude items
    MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) parsedQuery.getClauses()[0].getQuery();
    // all terms must match
    mltQuery.setMinimumShouldMatch("100%");
    mltQuery.setMinWordLen(0);
    mltQuery.setMinDocFreq(0);
    // one document has all values
    MemoryIndex index = new MemoryIndex();
    index.addField("name.first", "apache lucene", new WhitespaceAnalyzer());
    index.addField("name.last", "1 2 3 4", new WhitespaceAnalyzer());
    // two clauses, one for items and one for like_text if set
    BooleanQuery luceneQuery = (BooleanQuery)
mltQuery.rewrite(index.createSearcher().getIndexReader()); BooleanClause[] clauses = luceneQuery.getClauses(); // check for items int minNumberShouldMatch = ((BooleanQuery) (clauses[0].getQuery())).getMinimumNumberShouldMatch(); assertThat(minNumberShouldMatch, is(4)); // and for like_text minNumberShouldMatch = ((BooleanQuery) (clauses[1].getQuery())).getMinimumNumberShouldMatch(); assertThat(minNumberShouldMatch, is(2)); } private static class MockMoreLikeThisFetchService extends MoreLikeThisFetchService { public MockMoreLikeThisFetchService() { super(null, ImmutableSettings.Builder.EMPTY_SETTINGS); } @Override public MultiTermVectorsResponse fetchResponse(MultiTermVectorsRequest items) throws IOException { MultiTermVectorsItemResponse[] responses = new MultiTermVectorsItemResponse[items.size()]; int i = 0; for (TermVectorsRequest item : items) { TermVectorsResponse response = new TermVectorsResponse(item.index(), item.type(), item.id()); response.setExists(true); Fields generatedFields = generateFields(item.selectedFields().toArray(Strings.EMPTY_ARRAY), item.id()); EnumSet<TermVectorsRequest.Flag> flags = EnumSet.of(TermVectorsRequest.Flag.Positions, TermVectorsRequest.Flag.Offsets); response.setFields(generatedFields, item.selectedFields(), flags, generatedFields); responses[i++] = new MultiTermVectorsItemResponse(response, null); } return new MultiTermVectorsResponse(responses); } } private static Fields generateFields(String[] fieldNames, String text) throws IOException { MemoryIndex index = new MemoryIndex(); for (String fieldName : fieldNames) { index.addField(fieldName, text, new WhitespaceAnalyzer()); } return MultiFields.getFields(index.createSearcher().getIndexReader()); } private static String termsToString(Terms terms) throws IOException { String strings = ""; TermsEnum termsEnum = terms.iterator(); CharsRefBuilder spare = new CharsRefBuilder(); BytesRef text; while((text = termsEnum.next()) != null) { spare.copyUTF8Bytes(text); String term = 
spare.toString(); strings += term; } return strings; } @Test public void testGeoDistanceFilterNamed() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance-named.json"); ParsedQuery parsedQuery = queryParser.parse(query); assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true)); assertThat(parsedQuery.query(), instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery.query(); GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter1() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance1.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter2() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance2.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = 
(ConstantScoreQuery) parsedQuery; GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter3() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance3.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter4() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance4.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter5() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = 
copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance5.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter6() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance6.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter7() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance7.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); 
assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(0.012, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter8() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance8.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.KILOMETERS.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter9() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance9.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter10() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance10.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; 
GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter11() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance11.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoDistanceFilter12() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_distance12.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoDistanceFilter filter = (GeoDistanceFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.lat(), closeTo(40, 0.00001)); assertThat(filter.lon(), closeTo(-70, 0.00001)); assertThat(filter.distance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001)); } @Test public void testGeoBoundingBoxFilterNamed() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = 
copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox-named.json"); ParsedQuery parsedQuery = queryParser.parse(query); assertThat(parsedQuery.query(), instanceOf(ConstantScoreQuery.class)); assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery.query(); InMemoryGeoBoundingBoxFilter filter = (InMemoryGeoBoundingBoxFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); } @Test public void testGeoBoundingBoxFilter1() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox1.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; InMemoryGeoBoundingBoxFilter filter = (InMemoryGeoBoundingBoxFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); } @Test public void testGeoBoundingBoxFilter2() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox2.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; 
InMemoryGeoBoundingBoxFilter filter = (InMemoryGeoBoundingBoxFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); } @Test public void testGeoBoundingBoxFilter3() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox3.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; InMemoryGeoBoundingBoxFilter filter = (InMemoryGeoBoundingBoxFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); } @Test public void testGeoBoundingBoxFilter4() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox4.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; InMemoryGeoBoundingBoxFilter filter = (InMemoryGeoBoundingBoxFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); assertThat(filter.bottomRight().lon(), closeTo(-80, 
0.00001)); } @Test public void testGeoBoundingBoxFilter5() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox5.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; InMemoryGeoBoundingBoxFilter filter = (InMemoryGeoBoundingBoxFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); } @Test public void testGeoBoundingBoxFilter6() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_boundingbox6.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; InMemoryGeoBoundingBoxFilter filter = (InMemoryGeoBoundingBoxFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.topLeft().lat(), closeTo(40, 0.00001)); assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001)); assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001)); assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001)); } @Test public void testGeoPolygonNamedFilter() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_polygon-named.json"); ParsedQuery parsedQuery = queryParser.parse(query); assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true)); 
assertThat(parsedQuery.query(), instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery.query(); GeoPolygonFilter filter = (GeoPolygonFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.points().length, equalTo(4)); assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); assertThat(filter.points()[1].lat(), closeTo(30, 0.00001)); assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); } @Test public void testGeoPolygonFilterParsingExceptions() throws IOException { String[] brokenFiles = new String[]{ "/org/elasticsearch/index/query/geo_polygon_exception_1.json", "/org/elasticsearch/index/query/geo_polygon_exception_2.json", "/org/elasticsearch/index/query/geo_polygon_exception_3.json", "/org/elasticsearch/index/query/geo_polygon_exception_4.json", "/org/elasticsearch/index/query/geo_polygon_exception_5.json" }; for (String brokenFile : brokenFiles) { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath(brokenFile); try { queryParser.parse(query).query(); fail("parsing a broken geo_polygon filter didn't fail as expected while parsing: " + brokenFile); } catch (QueryParsingException e) { // success! 
} } } @Test public void testGeoPolygonFilter1() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_polygon1.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoPolygonFilter filter = (GeoPolygonFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.points().length, equalTo(4)); assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); assertThat(filter.points()[1].lat(), closeTo(30, 0.00001)); assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); } @Test public void testGeoPolygonFilter2() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_polygon2.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoPolygonFilter filter = (GeoPolygonFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.points().length, equalTo(4)); assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); assertThat(filter.points()[1].lat(), closeTo(30, 0.00001)); assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); } @Test public void testGeoPolygonFilter3() throws IOException { IndexQueryParserService queryParser = 
queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_polygon3.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoPolygonFilter filter = (GeoPolygonFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.points().length, equalTo(4)); assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); assertThat(filter.points()[1].lat(), closeTo(30, 0.00001)); assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); } @Test public void testGeoPolygonFilter4() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geo_polygon4.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; GeoPolygonFilter filter = (GeoPolygonFilter) constantScoreQuery.getQuery(); assertThat(filter.fieldName(), equalTo("location")); assertThat(filter.points().length, equalTo(4)); assertThat(filter.points()[0].lat(), closeTo(40, 0.00001)); assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001)); assertThat(filter.points()[1].lat(), closeTo(30, 0.00001)); assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001)); assertThat(filter.points()[2].lat(), closeTo(20, 0.00001)); assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001)); } @Test public void testGeoShapeFilter() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geoShape-filter.json"); 
Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) parsedQuery; assertThat(constantScoreQuery.getQuery(), instanceOf(IntersectsPrefixTreeFilter.class)); } @Test public void testGeoShapeQuery() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/geoShape-query.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); ConstantScoreQuery csq = (ConstantScoreQuery) parsedQuery; assertThat(csq.getQuery(), instanceOf(IntersectsPrefixTreeFilter.class)); } @Test public void testCommonTermsQuery1() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query1.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery; assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), nullValue()); assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("2")); } @Test public void testCommonTermsQuery2() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query2.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery; assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), equalTo("50%")); assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("5<20%")); } @Test public void testCommonTermsQuery3() throws IOException { IndexQueryParserService queryParser = 
queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query3.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class)); ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery; assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), nullValue()); assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("2")); } @Test(expected = QueryParsingException.class) public void assureMalformedThrowsException() throws IOException { IndexQueryParserService queryParser; queryParser = queryParser(); String query; query = copyToStringFromClasspath("/org/elasticsearch/index/query/faulty-function-score-query.json"); Query parsedQuery = queryParser.parse(query).query(); } @Test public void testFilterParsing() throws IOException { IndexQueryParserService queryParser; queryParser = queryParser(); String query; query = copyToStringFromClasspath("/org/elasticsearch/index/query/function-filter-score-query.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat((double) (parsedQuery.getBoost()), Matchers.closeTo(3.0, 1.e-7)); } @Test public void testBadTypeMatchQuery() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/match-query-bad-type.json"); QueryParsingException expectedException = null; try { queryParser.parse(query).query(); } catch (QueryParsingException qpe) { expectedException = qpe; } assertThat(expectedException, notNullValue()); } @Test public void testMultiMatchQuery() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/multiMatch-query-simple.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(DisjunctionMaxQuery.class)); } @Test public void testBadTypeMultiMatchQuery() 
throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/multiMatch-query-bad-type.json"); QueryParsingException expectedException = null; try { queryParser.parse(query).query(); } catch (QueryParsingException qpe) { expectedException = qpe; } assertThat(expectedException, notNullValue()); } @Test public void testMultiMatchQueryWithFieldsAsString() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/multiMatch-query-fields-as-string.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(BooleanQuery.class)); } @Test public void testSimpleQueryString() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/simple-query-string.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(BooleanQuery.class)); } @Test public void testMatchWithFuzzyTranspositions() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/match-with-fuzzy-transpositions.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); assertThat( ((FuzzyQuery) parsedQuery).getTranspositions(), equalTo(true)); } @Test public void testMatchWithoutFuzzyTranspositions() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/match-without-fuzzy-transpositions.json"); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(FuzzyQuery.class)); assertThat( ((FuzzyQuery) parsedQuery).getTranspositions(), equalTo(false)); } // https://github.com/elasticsearch/elasticsearch/issues/7240 @Test public 
void testEmptyBooleanQuery() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = jsonBuilder().startObject().startObject("bool").endObject().endObject().string(); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(MatchAllDocsQuery.class)); } // https://github.com/elasticsearch/elasticsearch/issues/7240 @Test public void testEmptyBooleanQueryInsideFQuery() throws Exception { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/fquery-with-empty-bool-query.json"); XContentParser parser = XContentHelper.createParser(new BytesArray(query)); ParsedFilter parsedQuery = queryParser.parseInnerFilter(parser); assertThat(parsedQuery.filter(), instanceOf(QueryWrapperFilter.class)); //QueryWrapperFilter filter = parsedQuery.filter(); assertThat(((QueryWrapperFilter) parsedQuery.filter()).getQuery(), instanceOf(FilteredQuery.class)); QueryWrapperFilter inner = (QueryWrapperFilter) ((FilteredQuery) ((QueryWrapperFilter) parsedQuery.filter()).getQuery()).getFilter(); assertThat(inner.getQuery(), instanceOf(TermQuery.class)); TermQuery filter = (TermQuery) inner.getQuery(); assertThat(filter.getTerm().toString(), equalTo("text:apache")); } @Test public void testProperErrorMessageWhenTwoFunctionsDefinedInQueryBody() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/function-score-query-causing-NPE.json"); try { queryParser.parse(query).query(); fail("FunctionScoreQueryParser should throw an exception here because two functions in body are not allowed."); } catch (QueryParsingException e) { assertThat(e.getDetailedMessage(), containsString("Use functions[{...},...] 
if you want to define several functions.")); } } @Test public void testWeight1fStillProducesWeighFunction() throws IOException { IndexQueryParserService queryParser = queryParser(); String queryString = jsonBuilder().startObject() .startObject("function_score") .startArray("functions") .startObject() .startObject("field_value_factor") .field("field", "popularity") .endObject() .field("weight", 1.0) .endObject() .endArray() .endObject() .endObject().string(); IndexService indexService = createIndex("testidx", client().admin().indices().prepareCreate("testidx") .addMapping("doc",jsonBuilder().startObject() .startObject("properties") .startObject("popularity").field("type", "float").endObject() .endObject() .endObject())); SearchContext.setCurrent(createSearchContext(indexService)); Query query = queryParser.parse(queryString).query(); assertThat(query, instanceOf(FunctionScoreQuery.class)); assertThat(((FunctionScoreQuery) query).getFunction(), instanceOf(WeightFactorFunction.class)); SearchContext.removeCurrent(); } @Test public void testProperErrorMessagesForMisplacedWeightsAndFunctions() throws IOException { IndexQueryParserService queryParser = queryParser(); String query = jsonBuilder().startObject().startObject("function_score") .startArray("functions") .startObject().field("weight", 2).field("boost_factor",2).endObject() .endArray() .endObject().endObject().string(); try { queryParser.parse(query).query(); fail("Expect exception here because boost_factor must not have a weight"); } catch (QueryParsingException e) { assertThat(e.getDetailedMessage(), containsString(BoostScoreFunction.BOOST_WEIGHT_ERROR_MESSAGE)); } try { functionScoreQuery().add(factorFunction(2.0f).setWeight(2.0f)); fail("Expect exception here because boost_factor must not have a weight"); } catch (ElasticsearchIllegalArgumentException e) { assertThat(e.getDetailedMessage(), containsString(BoostScoreFunction.BOOST_WEIGHT_ERROR_MESSAGE)); } query = 
jsonBuilder().startObject().startObject("function_score") .startArray("functions") .startObject().field("boost_factor",2).endObject() .endArray() .field("weight", 2) .endObject().endObject().string(); try { queryParser.parse(query).query(); fail("Expect exception here because array of functions and one weight in body is not allowed."); } catch (QueryParsingException e) { assertThat(e.getDetailedMessage(), containsString("You can either define \"functions\":[...] or a single function, not both. Found \"functions\": [...] already, now encountering \"weight\".")); } query = jsonBuilder().startObject().startObject("function_score") .field("weight", 2) .startArray("functions") .startObject().field("boost_factor",2).endObject() .endArray() .endObject().endObject().string(); try { queryParser.parse(query).query(); fail("Expect exception here because array of functions and one weight in body is not allowed."); } catch (QueryParsingException e) { assertThat(e.getDetailedMessage(), containsString("You can either define \"functions\":[...] or a single function, not both. 
Found \"weight\" already, now encountering \"functions\": [...].")); } } // https://github.com/elasticsearch/elasticsearch/issues/6722 public void testEmptyBoolSubClausesIsMatchAll() throws ElasticsearchException, IOException { String query = copyToStringFromClasspath("/org/elasticsearch/index/query/bool-query-with-empty-clauses-for-parsing.json"); IndexService indexService = createIndex("testidx", client().admin().indices().prepareCreate("testidx") .addMapping("foo") .addMapping("test", "_parent", "type=foo")); SearchContext.setCurrent(createSearchContext(indexService)); IndexQueryParserService queryParser = indexService.queryParserService(); Query parsedQuery = queryParser.parse(query).query(); assertThat(parsedQuery, instanceOf(ConstantScoreQuery.class)); assertThat(((ConstantScoreQuery) parsedQuery).getQuery(), instanceOf(CustomQueryWrappingFilter.class)); assertThat(((CustomQueryWrappingFilter) ((ConstantScoreQuery) parsedQuery).getQuery()).getQuery(), instanceOf(ParentConstantScoreQuery.class)); assertThat(((CustomQueryWrappingFilter) ((ConstantScoreQuery) parsedQuery).getQuery()).getQuery().toString(), equalTo("parent_filter[foo](filtered(*:*)->cache(QueryWrapperFilter(_type:foo)))")); SearchContext.removeCurrent(); } /** * helper to extract term from TermQuery. */ private Term getTerm(Query query) { while (query instanceof QueryWrapperFilter) { query = ((QueryWrapperFilter) query).getQuery(); } TermQuery wrapped = (TermQuery) query; return wrapped.getTerm(); } }
apache-2.0
nterry/aws-sdk-java
aws-java-sdk-ecs/src/main/java/com/amazonaws/services/ecs/model/transform/CreateServiceRequestMarshaller.java
5385
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.ecs.model.transform; import java.io.ByteArrayInputStream; import java.util.Collections; import java.util.Map; import java.util.List; import java.util.regex.Pattern; import com.amazonaws.AmazonClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.ecs.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.BinaryUtils; import com.amazonaws.util.StringUtils; import com.amazonaws.util.IdempotentUtils; import com.amazonaws.util.StringInputStream; import com.amazonaws.protocol.json.*; /** * CreateServiceRequest Marshaller */ public class CreateServiceRequestMarshaller implements Marshaller<Request<CreateServiceRequest>, CreateServiceRequest> { private final SdkJsonProtocolFactory protocolFactory; public CreateServiceRequestMarshaller(SdkJsonProtocolFactory protocolFactory) { this.protocolFactory = protocolFactory; } public Request<CreateServiceRequest> marshall( CreateServiceRequest createServiceRequest) { if (createServiceRequest == null) { throw new AmazonClientException( "Invalid argument passed to marshall(...)"); } Request<CreateServiceRequest> request = new DefaultRequest<CreateServiceRequest>( createServiceRequest, "AmazonECS"); request.addHeader("X-Amz-Target", "AmazonEC2ContainerServiceV20141113.CreateService"); 
request.setHttpMethod(HttpMethodName.POST); request.setResourcePath(""); try { final StructuredJsonGenerator jsonGenerator = protocolFactory .createGenerator(); jsonGenerator.writeStartObject(); if (createServiceRequest.getCluster() != null) { jsonGenerator.writeFieldName("cluster").writeValue( createServiceRequest.getCluster()); } if (createServiceRequest.getServiceName() != null) { jsonGenerator.writeFieldName("serviceName").writeValue( createServiceRequest.getServiceName()); } if (createServiceRequest.getTaskDefinition() != null) { jsonGenerator.writeFieldName("taskDefinition").writeValue( createServiceRequest.getTaskDefinition()); } com.amazonaws.internal.SdkInternalList<LoadBalancer> loadBalancersList = (com.amazonaws.internal.SdkInternalList<LoadBalancer>) createServiceRequest .getLoadBalancers(); if (!loadBalancersList.isEmpty() || !loadBalancersList.isAutoConstruct()) { jsonGenerator.writeFieldName("loadBalancers"); jsonGenerator.writeStartArray(); for (LoadBalancer loadBalancersListValue : loadBalancersList) { if (loadBalancersListValue != null) { LoadBalancerJsonMarshaller.getInstance().marshall( loadBalancersListValue, jsonGenerator); } } jsonGenerator.writeEndArray(); } if (createServiceRequest.getDesiredCount() != null) { jsonGenerator.writeFieldName("desiredCount").writeValue( createServiceRequest.getDesiredCount()); } if (createServiceRequest.getClientToken() != null) { jsonGenerator.writeFieldName("clientToken").writeValue( createServiceRequest.getClientToken()); } if (createServiceRequest.getRole() != null) { jsonGenerator.writeFieldName("role").writeValue( createServiceRequest.getRole()); } if (createServiceRequest.getDeploymentConfiguration() != null) { jsonGenerator.writeFieldName("deploymentConfiguration"); DeploymentConfigurationJsonMarshaller.getInstance().marshall( createServiceRequest.getDeploymentConfiguration(), jsonGenerator); } jsonGenerator.writeEndObject(); byte[] content = jsonGenerator.getBytes(); request.setContent(new 
ByteArrayInputStream(content)); request.addHeader("Content-Length", Integer.toString(content.length)); request.addHeader("Content-Type", jsonGenerator.getContentType()); } catch (Throwable t) { throw new AmazonClientException( "Unable to marshall request to JSON: " + t.getMessage(), t); } return request; } }
apache-2.0
tigerforest/tiger-forest
demo-http/src/main/java/com/xhh/demo/http/factory/CarFactory.java
568
package com.xhh.demo.http.factory;

/**
 * CarFactory — reflective factory that instantiates {@link Car}
 * implementations via their public no-argument constructor.
 *
 * @author tiger
 * @version 1.0.0 createTime: 14-4-3
 * @since 1.6
 */
public class CarFactory {

    /**
     * Creates an instance of the given {@link Car} subclass.
     *
     * @param c the concrete car class; must expose a no-arg constructor
     * @return a new instance, or {@code null} if instantiation fails
     *         (the original best-effort contract is preserved)
     */
    public static Car createCar (Class<? extends Car> c) {
        try {
            // Class.newInstance() is deprecated since Java 9 and rethrows
            // checked constructor exceptions without declaring them; the
            // Constructor API wraps them predictably instead.
            return c.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Small demo: builds two concrete cars and prints their descriptions. */
    public static void main(String[] args) {
        Car car = CarFactory.createCar(BenzCar.class);
        car.desc();
        Car c = CarFactory.createCar(FordCar.class);
        c.desc();
    }
}
apache-2.0
ryanemerson/activemq-artemis
artemis-server/src/main/java/org/apache/activemq/artemis/core/settings/impl/AddressSettings.java
27534
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.settings.impl; import java.io.Serializable; import org.apache.activemq.artemis.api.core.ActiveMQBuffer; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.core.journal.EncodingSupport; import org.apache.activemq.artemis.core.settings.Mergeable; import org.apache.activemq.artemis.utils.BufferHelper; /** * Configuration settings that are applied on the address level */ public class AddressSettings implements Mergeable<AddressSettings>, Serializable, EncodingSupport { private static final long serialVersionUID = 1607502280582336366L; /** * defaults used if null, this allows merging */ public static final long DEFAULT_MAX_SIZE_BYTES = -1; public static final AddressFullMessagePolicy DEFAULT_ADDRESS_FULL_MESSAGE_POLICY = AddressFullMessagePolicy.PAGE; public static final long DEFAULT_PAGE_SIZE = 10 * 1024 * 1024; public static final int DEFAULT_MAX_DELIVERY_ATTEMPTS = 10; public static final int DEFAULT_PAGE_MAX_CACHE = 5; public static final int DEFAULT_MESSAGE_COUNTER_HISTORY_DAY_LIMIT = 0; public static final long DEFAULT_REDELIVER_DELAY = 0L; public static final double DEFAULT_REDELIVER_MULTIPLIER = 1.0; public static 
final boolean DEFAULT_LAST_VALUE_QUEUE = false; public static final boolean DEFAULT_AUTO_CREATE_QUEUES = true; public static final boolean DEFAULT_AUTO_DELETE_QUEUES = true; public static final long DEFAULT_REDISTRIBUTION_DELAY = -1; public static final long DEFAULT_EXPIRY_DELAY = -1; public static final boolean DEFAULT_SEND_TO_DLA_ON_NO_ROUTE = false; public static final long DEFAULT_SLOW_CONSUMER_THRESHOLD = -1; public static final long DEFAULT_SLOW_CONSUMER_CHECK_PERIOD = 5; public static final SlowConsumerPolicy DEFAULT_SLOW_CONSUMER_POLICY = SlowConsumerPolicy.NOTIFY; private AddressFullMessagePolicy addressFullMessagePolicy = null; private Long maxSizeBytes = null; private Long pageSizeBytes = null; private Integer pageMaxCache = null; private Boolean dropMessagesWhenFull = null; private Integer maxDeliveryAttempts = null; private Integer messageCounterHistoryDayLimit = null; private Long redeliveryDelay = null; private Double redeliveryMultiplier = null; private Long maxRedeliveryDelay = null; private SimpleString deadLetterAddress = null; private SimpleString expiryAddress = null; private Long expiryDelay = AddressSettings.DEFAULT_EXPIRY_DELAY; private Boolean lastValueQueue = null; private Long redistributionDelay = null; private Boolean sendToDLAOnNoRoute = null; private Long slowConsumerThreshold = null; private Long slowConsumerCheckPeriod = null; private SlowConsumerPolicy slowConsumerPolicy = null; private Boolean autoCreateJmsQueues = null; private Boolean autoDeleteJmsQueues = null; public AddressSettings(AddressSettings other) { this.addressFullMessagePolicy = other.addressFullMessagePolicy; this.maxSizeBytes = other.maxSizeBytes; this.pageSizeBytes = other.pageSizeBytes; this.pageMaxCache = other.pageMaxCache; this.dropMessagesWhenFull = other.dropMessagesWhenFull; this.maxDeliveryAttempts = other.maxDeliveryAttempts; this.messageCounterHistoryDayLimit = other.messageCounterHistoryDayLimit; this.redeliveryDelay = other.redeliveryDelay; 
this.redeliveryMultiplier = other.redeliveryMultiplier; this.maxRedeliveryDelay = other.maxRedeliveryDelay; this.deadLetterAddress = other.deadLetterAddress; this.expiryAddress = other.expiryAddress; this.expiryDelay = other.expiryDelay; this.lastValueQueue = other.lastValueQueue; this.redistributionDelay = other.redistributionDelay; this.sendToDLAOnNoRoute = other.sendToDLAOnNoRoute; this.slowConsumerThreshold = other.slowConsumerThreshold; this.slowConsumerCheckPeriod = other.slowConsumerCheckPeriod; this.slowConsumerPolicy = other.slowConsumerPolicy; this.autoCreateJmsQueues = other.autoCreateJmsQueues; this.autoDeleteJmsQueues = other.autoDeleteJmsQueues; } public AddressSettings() { } public boolean isAutoCreateJmsQueues() { return autoCreateJmsQueues != null ? autoCreateJmsQueues : AddressSettings.DEFAULT_AUTO_CREATE_QUEUES; } public AddressSettings setAutoCreateJmsQueues(final boolean autoCreateJmsQueues) { this.autoCreateJmsQueues = autoCreateJmsQueues; return this; } public boolean isAutoDeleteJmsQueues() { return autoDeleteJmsQueues != null ? autoDeleteJmsQueues : AddressSettings.DEFAULT_AUTO_DELETE_QUEUES; } public AddressSettings setAutoDeleteJmsQueues(final boolean autoDeleteJmsQueues) { this.autoDeleteJmsQueues = autoDeleteJmsQueues; return this; } public boolean isLastValueQueue() { return lastValueQueue != null ? lastValueQueue : AddressSettings.DEFAULT_LAST_VALUE_QUEUE; } public AddressSettings setLastValueQueue(final boolean lastValueQueue) { this.lastValueQueue = lastValueQueue; return this; } public AddressFullMessagePolicy getAddressFullMessagePolicy() { return addressFullMessagePolicy != null ? addressFullMessagePolicy : AddressSettings.DEFAULT_ADDRESS_FULL_MESSAGE_POLICY; } public AddressSettings setAddressFullMessagePolicy(final AddressFullMessagePolicy addressFullMessagePolicy) { this.addressFullMessagePolicy = addressFullMessagePolicy; return this; } public long getPageSizeBytes() { return pageSizeBytes != null ? 
pageSizeBytes : AddressSettings.DEFAULT_PAGE_SIZE; } public AddressSettings setPageSizeBytes(final long pageSize) { pageSizeBytes = pageSize; return this; } public int getPageCacheMaxSize() { return pageMaxCache != null ? pageMaxCache : AddressSettings.DEFAULT_PAGE_MAX_CACHE; } public AddressSettings setPageCacheMaxSize(final int pageMaxCache) { this.pageMaxCache = pageMaxCache; return this; } public long getMaxSizeBytes() { return maxSizeBytes != null ? maxSizeBytes : AddressSettings.DEFAULT_MAX_SIZE_BYTES; } public AddressSettings setMaxSizeBytes(final long maxSizeBytes) { this.maxSizeBytes = maxSizeBytes; return this; } public int getMaxDeliveryAttempts() { return maxDeliveryAttempts != null ? maxDeliveryAttempts : AddressSettings.DEFAULT_MAX_DELIVERY_ATTEMPTS; } public AddressSettings setMaxDeliveryAttempts(final int maxDeliveryAttempts) { this.maxDeliveryAttempts = maxDeliveryAttempts; return this; } public int getMessageCounterHistoryDayLimit() { return messageCounterHistoryDayLimit != null ? messageCounterHistoryDayLimit : AddressSettings.DEFAULT_MESSAGE_COUNTER_HISTORY_DAY_LIMIT; } public AddressSettings setMessageCounterHistoryDayLimit(final int messageCounterHistoryDayLimit) { this.messageCounterHistoryDayLimit = messageCounterHistoryDayLimit; return this; } public long getRedeliveryDelay() { return redeliveryDelay != null ? redeliveryDelay : AddressSettings.DEFAULT_REDELIVER_DELAY; } public AddressSettings setRedeliveryDelay(final long redeliveryDelay) { this.redeliveryDelay = redeliveryDelay; return this; } public double getRedeliveryMultiplier() { return redeliveryMultiplier != null ? 
redeliveryMultiplier : AddressSettings.DEFAULT_REDELIVER_MULTIPLIER; } public AddressSettings setRedeliveryMultiplier(final double redeliveryMultiplier) { this.redeliveryMultiplier = redeliveryMultiplier; return this; } public long getMaxRedeliveryDelay() { // default is redelivery-delay * 10 as specified on the docs and at this JIRA: // https://issues.jboss.org/browse/HORNETQ-1263 return maxRedeliveryDelay != null ? maxRedeliveryDelay : (getRedeliveryDelay() * 10); } public AddressSettings setMaxRedeliveryDelay(final long maxRedeliveryDelay) { this.maxRedeliveryDelay = maxRedeliveryDelay; return this; } public SimpleString getDeadLetterAddress() { return deadLetterAddress; } public AddressSettings setDeadLetterAddress(final SimpleString deadLetterAddress) { this.deadLetterAddress = deadLetterAddress; return this; } public SimpleString getExpiryAddress() { return expiryAddress; } public AddressSettings setExpiryAddress(final SimpleString expiryAddress) { this.expiryAddress = expiryAddress; return this; } public Long getExpiryDelay() { return expiryDelay; } public AddressSettings setExpiryDelay(final Long expiryDelay) { this.expiryDelay = expiryDelay; return this; } public boolean isSendToDLAOnNoRoute() { return sendToDLAOnNoRoute != null ? sendToDLAOnNoRoute : AddressSettings.DEFAULT_SEND_TO_DLA_ON_NO_ROUTE; } public AddressSettings setSendToDLAOnNoRoute(final boolean value) { sendToDLAOnNoRoute = value; return this; } public long getRedistributionDelay() { return redistributionDelay != null ? redistributionDelay : AddressSettings.DEFAULT_REDISTRIBUTION_DELAY; } public AddressSettings setRedistributionDelay(final long redistributionDelay) { this.redistributionDelay = redistributionDelay; return this; } public long getSlowConsumerThreshold() { return slowConsumerThreshold != null ? 
slowConsumerThreshold : AddressSettings.DEFAULT_SLOW_CONSUMER_THRESHOLD; } public AddressSettings setSlowConsumerThreshold(final long slowConsumerThreshold) { this.slowConsumerThreshold = slowConsumerThreshold; return this; } public long getSlowConsumerCheckPeriod() { return slowConsumerCheckPeriod != null ? slowConsumerCheckPeriod : AddressSettings.DEFAULT_SLOW_CONSUMER_CHECK_PERIOD; } public AddressSettings setSlowConsumerCheckPeriod(final long slowConsumerCheckPeriod) { this.slowConsumerCheckPeriod = slowConsumerCheckPeriod; return this; } public SlowConsumerPolicy getSlowConsumerPolicy() { return slowConsumerPolicy != null ? slowConsumerPolicy : AddressSettings.DEFAULT_SLOW_CONSUMER_POLICY; } public AddressSettings setSlowConsumerPolicy(final SlowConsumerPolicy slowConsumerPolicy) { this.slowConsumerPolicy = slowConsumerPolicy; return this; } /** * merge 2 objects in to 1 * * @param merged */ public void merge(final AddressSettings merged) { if (maxDeliveryAttempts == null) { maxDeliveryAttempts = merged.maxDeliveryAttempts; } if (dropMessagesWhenFull == null) { dropMessagesWhenFull = merged.dropMessagesWhenFull; } if (maxSizeBytes == null) { maxSizeBytes = merged.maxSizeBytes; } if (pageMaxCache == null) { pageMaxCache = merged.pageMaxCache; } if (pageSizeBytes == null) { pageSizeBytes = merged.getPageSizeBytes(); } if (messageCounterHistoryDayLimit == null) { messageCounterHistoryDayLimit = merged.messageCounterHistoryDayLimit; } if (redeliveryDelay == null) { redeliveryDelay = merged.redeliveryDelay; } if (redeliveryMultiplier == null) { redeliveryMultiplier = merged.redeliveryMultiplier; } if (maxRedeliveryDelay == null) { maxRedeliveryDelay = merged.maxRedeliveryDelay; } if (deadLetterAddress == null) { deadLetterAddress = merged.deadLetterAddress; } if (expiryAddress == null) { expiryAddress = merged.expiryAddress; } if (expiryDelay == null) { expiryDelay = merged.expiryDelay; } if (redistributionDelay == null) { redistributionDelay = 
merged.redistributionDelay; } if (sendToDLAOnNoRoute == null) { sendToDLAOnNoRoute = merged.sendToDLAOnNoRoute; } if (addressFullMessagePolicy == null) { addressFullMessagePolicy = merged.addressFullMessagePolicy; } if (slowConsumerThreshold == null) { slowConsumerThreshold = merged.slowConsumerThreshold; } if (slowConsumerCheckPeriod == null) { slowConsumerCheckPeriod = merged.slowConsumerCheckPeriod; } if (slowConsumerPolicy == null) { slowConsumerPolicy = merged.slowConsumerPolicy; } if (autoCreateJmsQueues == null) { autoCreateJmsQueues = merged.autoCreateJmsQueues; } if (autoDeleteJmsQueues == null) { autoDeleteJmsQueues = merged.autoDeleteJmsQueues; } } @Override public void decode(ActiveMQBuffer buffer) { SimpleString policyStr = buffer.readNullableSimpleString(); if (policyStr != null) { addressFullMessagePolicy = AddressFullMessagePolicy.valueOf(policyStr.toString()); } else { addressFullMessagePolicy = null; } maxSizeBytes = BufferHelper.readNullableLong(buffer); pageSizeBytes = BufferHelper.readNullableLong(buffer); pageMaxCache = BufferHelper.readNullableInteger(buffer); dropMessagesWhenFull = BufferHelper.readNullableBoolean(buffer); maxDeliveryAttempts = BufferHelper.readNullableInteger(buffer); messageCounterHistoryDayLimit = BufferHelper.readNullableInteger(buffer); redeliveryDelay = BufferHelper.readNullableLong(buffer); redeliveryMultiplier = BufferHelper.readNullableDouble(buffer); maxRedeliveryDelay = BufferHelper.readNullableLong(buffer); deadLetterAddress = buffer.readNullableSimpleString(); expiryAddress = buffer.readNullableSimpleString(); expiryDelay = BufferHelper.readNullableLong(buffer); lastValueQueue = BufferHelper.readNullableBoolean(buffer); redistributionDelay = BufferHelper.readNullableLong(buffer); sendToDLAOnNoRoute = BufferHelper.readNullableBoolean(buffer); slowConsumerThreshold = BufferHelper.readNullableLong(buffer); slowConsumerCheckPeriod = BufferHelper.readNullableLong(buffer); policyStr = 
buffer.readNullableSimpleString(); if (policyStr != null) { slowConsumerPolicy = SlowConsumerPolicy.valueOf(policyStr.toString()); } else { slowConsumerPolicy = null; } autoCreateJmsQueues = BufferHelper.readNullableBoolean(buffer); autoDeleteJmsQueues = BufferHelper.readNullableBoolean(buffer); } @Override public int getEncodeSize() { return BufferHelper.sizeOfNullableSimpleString(addressFullMessagePolicy != null ? addressFullMessagePolicy.toString() : null) + BufferHelper.sizeOfNullableLong(maxSizeBytes) + BufferHelper.sizeOfNullableLong(pageSizeBytes) + BufferHelper.sizeOfNullableInteger(pageMaxCache) + BufferHelper.sizeOfNullableBoolean(dropMessagesWhenFull) + BufferHelper.sizeOfNullableInteger(maxDeliveryAttempts) + BufferHelper.sizeOfNullableInteger(messageCounterHistoryDayLimit) + BufferHelper.sizeOfNullableLong(redeliveryDelay) + BufferHelper.sizeOfNullableDouble(redeliveryMultiplier) + BufferHelper.sizeOfNullableLong(maxRedeliveryDelay) + SimpleString.sizeofNullableString(deadLetterAddress) + SimpleString.sizeofNullableString(expiryAddress) + BufferHelper.sizeOfNullableLong(expiryDelay) + BufferHelper.sizeOfNullableBoolean(lastValueQueue) + BufferHelper.sizeOfNullableLong(redistributionDelay) + BufferHelper.sizeOfNullableBoolean(sendToDLAOnNoRoute) + BufferHelper.sizeOfNullableLong(slowConsumerCheckPeriod) + BufferHelper.sizeOfNullableLong(slowConsumerThreshold) + BufferHelper.sizeOfNullableSimpleString(slowConsumerPolicy != null ? slowConsumerPolicy.toString() : null) + BufferHelper.sizeOfNullableBoolean(autoCreateJmsQueues) + BufferHelper.sizeOfNullableBoolean(autoDeleteJmsQueues); } @Override public void encode(ActiveMQBuffer buffer) { buffer.writeNullableSimpleString(addressFullMessagePolicy != null ? 
new SimpleString(addressFullMessagePolicy.toString()) : null); BufferHelper.writeNullableLong(buffer, maxSizeBytes); BufferHelper.writeNullableLong(buffer, pageSizeBytes); BufferHelper.writeNullableInteger(buffer, pageMaxCache); BufferHelper.writeNullableBoolean(buffer, dropMessagesWhenFull); BufferHelper.writeNullableInteger(buffer, maxDeliveryAttempts); BufferHelper.writeNullableInteger(buffer, messageCounterHistoryDayLimit); BufferHelper.writeNullableLong(buffer, redeliveryDelay); BufferHelper.writeNullableDouble(buffer, redeliveryMultiplier); BufferHelper.writeNullableLong(buffer, maxRedeliveryDelay); buffer.writeNullableSimpleString(deadLetterAddress); buffer.writeNullableSimpleString(expiryAddress); BufferHelper.writeNullableLong(buffer, expiryDelay); BufferHelper.writeNullableBoolean(buffer, lastValueQueue); BufferHelper.writeNullableLong(buffer, redistributionDelay); BufferHelper.writeNullableBoolean(buffer, sendToDLAOnNoRoute); BufferHelper.writeNullableLong(buffer, slowConsumerThreshold); BufferHelper.writeNullableLong(buffer, slowConsumerCheckPeriod); buffer.writeNullableSimpleString(slowConsumerPolicy != null ? new SimpleString(slowConsumerPolicy.toString()) : null); BufferHelper.writeNullableBoolean(buffer, autoCreateJmsQueues); BufferHelper.writeNullableBoolean(buffer, autoDeleteJmsQueues); } /* (non-Javadoc) * @see java.lang.Object#hashCode() */ @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((addressFullMessagePolicy == null) ? 0 : addressFullMessagePolicy.hashCode()); result = prime * result + ((deadLetterAddress == null) ? 0 : deadLetterAddress.hashCode()); result = prime * result + ((dropMessagesWhenFull == null) ? 0 : dropMessagesWhenFull.hashCode()); result = prime * result + ((expiryAddress == null) ? 0 : expiryAddress.hashCode()); result = prime * result + ((expiryDelay == null) ? 0 : expiryDelay.hashCode()); result = prime * result + ((lastValueQueue == null) ? 
0 : lastValueQueue.hashCode()); result = prime * result + ((maxDeliveryAttempts == null) ? 0 : maxDeliveryAttempts.hashCode()); result = prime * result + ((maxSizeBytes == null) ? 0 : maxSizeBytes.hashCode()); result = prime * result + ((messageCounterHistoryDayLimit == null) ? 0 : messageCounterHistoryDayLimit.hashCode()); result = prime * result + ((pageSizeBytes == null) ? 0 : pageSizeBytes.hashCode()); result = prime * result + ((pageMaxCache == null) ? 0 : pageMaxCache.hashCode()); result = prime * result + ((redeliveryDelay == null) ? 0 : redeliveryDelay.hashCode()); result = prime * result + ((redeliveryMultiplier == null) ? 0 : redeliveryMultiplier.hashCode()); result = prime * result + ((maxRedeliveryDelay == null) ? 0 : maxRedeliveryDelay.hashCode()); result = prime * result + ((redistributionDelay == null) ? 0 : redistributionDelay.hashCode()); result = prime * result + ((sendToDLAOnNoRoute == null) ? 0 : sendToDLAOnNoRoute.hashCode()); result = prime * result + ((slowConsumerThreshold == null) ? 0 : slowConsumerThreshold.hashCode()); result = prime * result + ((slowConsumerCheckPeriod == null) ? 0 : slowConsumerCheckPeriod.hashCode()); result = prime * result + ((slowConsumerPolicy == null) ? 0 : slowConsumerPolicy.hashCode()); result = prime * result + ((autoCreateJmsQueues == null) ? 0 : autoCreateJmsQueues.hashCode()); result = prime * result + ((autoDeleteJmsQueues == null) ? 
0 : autoDeleteJmsQueues.hashCode());
      return result;
   }

   /**
    * Value equality over every configurable setting of this address.
    * Two AddressSettings are equal iff all 21 setting fields are equal
    * (null matching null). Kept consistent with hashCode() above, which
    * folds the same fields.
    *
    * @see java.lang.Object#equals(java.lang.Object)
    */
   @Override
   public boolean equals(Object obj)
   {
      if (this == obj)
         return true;
      if (obj == null || getClass() != obj.getClass())
         return false;
      AddressSettings other = (AddressSettings) obj;
      // Objects.equals covers exactly the null/non-null combinations the
      // previous hand-rolled "if (f == null) ... else if (!f.equals(...))"
      // chains handled, field by field.
      return java.util.Objects.equals(addressFullMessagePolicy, other.addressFullMessagePolicy) &&
             java.util.Objects.equals(deadLetterAddress, other.deadLetterAddress) &&
             java.util.Objects.equals(dropMessagesWhenFull, other.dropMessagesWhenFull) &&
             java.util.Objects.equals(expiryAddress, other.expiryAddress) &&
             java.util.Objects.equals(expiryDelay, other.expiryDelay) &&
             java.util.Objects.equals(lastValueQueue, other.lastValueQueue) &&
             java.util.Objects.equals(maxDeliveryAttempts, other.maxDeliveryAttempts) &&
             java.util.Objects.equals(maxSizeBytes, other.maxSizeBytes) &&
             java.util.Objects.equals(messageCounterHistoryDayLimit, other.messageCounterHistoryDayLimit) &&
             java.util.Objects.equals(pageSizeBytes, other.pageSizeBytes) &&
             java.util.Objects.equals(pageMaxCache, other.pageMaxCache) &&
             java.util.Objects.equals(redeliveryDelay, other.redeliveryDelay) &&
             java.util.Objects.equals(redeliveryMultiplier, other.redeliveryMultiplier) &&
             java.util.Objects.equals(maxRedeliveryDelay, other.maxRedeliveryDelay) &&
             java.util.Objects.equals(redistributionDelay, other.redistributionDelay) &&
             java.util.Objects.equals(sendToDLAOnNoRoute, other.sendToDLAOnNoRoute) &&
             java.util.Objects.equals(slowConsumerThreshold, other.slowConsumerThreshold) &&
             java.util.Objects.equals(slowConsumerCheckPeriod, other.slowConsumerCheckPeriod) &&
             java.util.Objects.equals(slowConsumerPolicy, other.slowConsumerPolicy) &&
             java.util.Objects.equals(autoCreateJmsQueues, other.autoCreateJmsQueues) &&
             java.util.Objects.equals(autoDeleteJmsQueues, other.autoDeleteJmsQueues);
   }

   /**
    * Debug representation listing every setting field in declaration order.
    *
    * @see java.lang.Object#toString()
    */
   @Override
   public String toString()
   {
      return "AddressSettings [addressFullMessagePolicy=" + addressFullMessagePolicy +
             ", deadLetterAddress=" + deadLetterAddress +
             ", dropMessagesWhenFull=" + dropMessagesWhenFull +
             ", expiryAddress=" + expiryAddress +
             ", expiryDelay=" + expiryDelay +
             ", lastValueQueue=" + lastValueQueue +
             ", maxDeliveryAttempts=" + maxDeliveryAttempts +
             ", maxSizeBytes=" + maxSizeBytes +
             ", messageCounterHistoryDayLimit=" + messageCounterHistoryDayLimit +
             ", pageSizeBytes=" + pageSizeBytes +
             ", pageMaxCache=" + pageMaxCache +
             ", redeliveryDelay=" + redeliveryDelay +
             ", redeliveryMultiplier=" + redeliveryMultiplier +
             ", maxRedeliveryDelay=" + maxRedeliveryDelay +
             ", redistributionDelay=" + redistributionDelay +
             ", sendToDLAOnNoRoute=" + sendToDLAOnNoRoute +
             ", slowConsumerThreshold=" + slowConsumerThreshold +
             ", slowConsumerCheckPeriod=" + slowConsumerCheckPeriod +
             ", slowConsumerPolicy=" + slowConsumerPolicy +
             ", autoCreateJmsQueues=" + autoCreateJmsQueues +
             ", autoDeleteJmsQueues=" + autoDeleteJmsQueues +
             "]";
   }
}
apache-2.0
tuxmea/one
src/oca/java/test/VirtualNetworkTest.java
6621
/*******************************************************************************
 * Copyright 2002-2015, OpenNebula Project (OpenNebula.org), C12G Labs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

import static org.junit.Assert.assertTrue;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.opennebula.client.Client;
import org.opennebula.client.OneResponse;
import org.opennebula.client.vnet.VirtualNetwork;
import org.opennebula.client.vnet.VirtualNetworkPool;

/**
 * Integration tests for {@link VirtualNetwork}: allocation, deletion,
 * address-range (AR) management, lease hold/release and template updates.
 *
 * NOTE(review): these tests talk to a live OpenNebula frontend through
 * {@link Client} (endpoint/credentials presumably come from the environment
 * -- confirm against the Client() no-arg constructor). They are not isolated
 * unit tests.
 */
public class VirtualNetworkTest
{

    // Network allocated fresh in setUp() and removed in tearDown().
    private static VirtualNetwork vnet;
    private static VirtualNetworkPool vnetPool;

    private static Client client;

    private static OneResponse res;

    // Name the per-test template below registers; allocate() searches for it.
    private static String name = "new_test_vnet";

    // Template for the network created before every test: one IPv4 address
    // range of 254 leases starting at 192.168.0.1.
    private static String template = "NAME = " + name + "\n"+
                    "BRIDGE = vbr0\n" +
                    "NETWORK_ADDRESS = 192.168.0.0\n"+
                    "AR = [ TYPE = IP4, IP = 192.168.0.1, SIZE = 254 ]\n";

    // Secondary single-lease network used by the addAr/rmAr tests.
    private static String second_template = "NAME = \"Net number one\"\n" +
                    "BRIDGE = br1\n" +
                    "AR = [ TYPE = IP4, IP=130.10.0.1, SIZE = 1]";

    /**
     * Creates the shared client and pool once for the whole class.
     *
     * @throws java.lang.Exception
     */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception
    {
        client   = new Client();
        vnetPool = new VirtualNetworkPool(client);
    }

    /**
     * Nothing to clean up at class level; per-test networks are removed
     * in tearDown().
     *
     * @throws java.lang.Exception
     */
    @AfterClass
    public static void tearDownAfterClass() throws Exception
    {
    }

    /**
     * Allocates a fresh virtual network for each test. On allocation failure
     * the id falls back to -1, so subsequent calls on vnet will error and the
     * test's own assertions report the problem.
     *
     * @throws java.lang.Exception
     */
    @Before
    public void setUp() throws Exception
    {
        res = VirtualNetwork.allocate(client, template);

        int vnid = !res.isError() ? Integer.parseInt(res.getMessage()) : -1;
        vnet     = new VirtualNetwork(vnid, client);
    }

    /**
     * Removes the per-test network. The response is deliberately ignored:
     * some tests (delete, hold) already deleted it themselves.
     *
     * @throws java.lang.Exception
     */
    @After
    public void tearDown() throws Exception
    {
        vnet.delete();
    }

    @Test
    public void allocate()
    {
        vnetPool.info();

        // The network allocated in setUp() must show up in the pool.
        boolean found = false;
        for(VirtualNetwork vn : vnetPool)
        {
            found = found || vn.getName().equals(name);
        }

        assertTrue( found );
    }

    @Test
    public void info()
    {
        res = vnet.info();
        assertTrue( res.getErrorMessage(), !res.isError() );

        assertTrue( vnet.getName().equals(name) );
    }

    @Test
    public void attributes()
    {
        res = vnet.info();
        assertTrue( res.getErrorMessage(), !res.isError() );

        // xpath() reads from the info() document fetched above.
        assertTrue( vnet.xpath("NAME").equals(name) );
        assertTrue( vnet.xpath("BRIDGE").equals("vbr0") );
    }

    @Test
    public void delete()
    {
        res = vnet.delete();
        assertTrue( res.getErrorMessage(), !res.isError() );

        // A deleted network can no longer be queried.
        // NOTE(review): tearDown() will delete vnet a second time; that
        // failure is silently ignored there.
        res = vnet.info();
        assertTrue( res.isError() );
    }

    @Test
    public void addAr()
    {
        res = VirtualNetwork.allocate(client, second_template);
        assertTrue( res.getErrorMessage(), !res.isError() );

        VirtualNetwork second_vnet =
            new VirtualNetwork(Integer.parseInt(res.getMessage()), client);

        // Well-formed AR definitions must be accepted...
        res = second_vnet.addAr("AR = [IP = 130.10.0.5, SIZE = 1, TYPE = IP4]");
        assertTrue( res.getErrorMessage(), !res.isError() );

        res = second_vnet.addAr(
            "AR = [IP = 130.10.0.6, MAC = 50:20:20:20:20:20, SIZE = 1, TYPE = IP4]");
        assertTrue( res.getErrorMessage(), !res.isError() );

        // ...while a bare IP string is not a valid AR template.
        res = second_vnet.addAr("130.10.0.6");
        assertTrue( res.isError() );

        second_vnet.delete();
    }

    @Test
    public void rmAr()
    {
        res = VirtualNetwork.allocate(client, second_template);
        assertTrue( res.getErrorMessage(), !res.isError() );

        VirtualNetwork second_vnet =
            new VirtualNetwork(Integer.parseInt(res.getMessage()), client);

        // The template defines a single AR (id 0); removing it once works,
        // removing it again must fail.
        res = second_vnet.rmAr(0);
        assertTrue( res.getErrorMessage(), !res.isError() );

        res = second_vnet.rmAr(0);
        assertTrue( res.isError() );

        second_vnet.delete();
    }

    @Test
    public void updateAr()
    {
        String new_template = "AR = [ AR_ID = 0, ATT2 = NEW_VAL, ATT3 = VAL3 ]";

        res = vnet.updateAr(new_template);
        assertTrue( res.getErrorMessage(), !res.isError() );

        res = vnet.info();
        assertTrue( res.getErrorMessage(), !res.isError() );

        // updateAr replaces the AR's free attributes: ATT1 is gone, the
        // new ones are present.
        assertTrue( vnet.xpath("AR_POOL/AR[AR_ID=0]/ATT1").equals( "" ) );
        assertTrue( vnet.xpath("AR_POOL/AR[AR_ID=0]/ATT2").equals( "NEW_VAL" ) );
        assertTrue( vnet.xpath("AR_POOL/AR[AR_ID=0]/ATT3").equals( "VAL3" ) );
    }

    @Test
    public void hold()
    {
        // Hold by IP, and by IP within an explicit AR id.
        res = vnet.hold("192.168.0.10");
        assertTrue( res.getErrorMessage(), !res.isError() );

        res = vnet.hold("192.168.0.11", 0);
        assertTrue( res.getErrorMessage(), !res.isError() );

        // An address outside the network's AR cannot be held.
        res = vnet.hold("192.168.100.1");
        assertTrue( res.isError() );

        res = vnet.release("192.168.0.10");
        assertTrue( res.getErrorMessage(), !res.isError() );

        res = vnet.release("192.168.0.11", 0);
        assertTrue( res.getErrorMessage(), !res.isError() );

        // NOTE(review): 192.168.0.10 was already released above, yet this
        // second release is asserted to succeed -- looks like a copy-paste
        // slip; confirm the intended behaviour against the OCA release API.
        res = vnet.release("192.168.0.10");
        assertTrue( res.getErrorMessage(), !res.isError() );

        // NOTE(review): tearDown() deletes vnet again; that error is ignored.
        vnet.delete();
    }

    @Test
    public void update()
    {
        String new_template =  "ATT2 = NEW_VAL\n" +
                        "ATT3 = VAL3";

        res = vnet.update(new_template);
        assertTrue( res.getErrorMessage(), !res.isError() );

        res = vnet.info();
        assertTrue( res.getErrorMessage(), !res.isError() );

        // update() replaces the user template wholesale.
        assertTrue( vnet.xpath("TEMPLATE/ATT1").equals( "" ) );
        assertTrue( vnet.xpath("TEMPLATE/ATT2").equals( "NEW_VAL" ) );
        assertTrue( vnet.xpath("TEMPLATE/ATT3").equals( "VAL3" ) );
    }

    // TODO: reserve, free
}
apache-2.0
dylanplecki/keycloak
saml/client-adapter/core/src/main/java/org/keycloak/adapters/saml/SamlConfigResolver.java
1452
/*
 * Copyright 2014 Red Hat Inc. and/or its affiliates and other contributors
 * as indicated by the @author tags. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.adapters.saml;

import org.keycloak.adapters.spi.HttpFacade.Request;

/**
 * On multi-tenant scenarios, Keycloak will defer the resolution of a
 * SamlDeployment to the target application at the request-phase.
 *
 * A Request object is passed to the resolver and callers expect a complete
 * SamlDeployment. Based on this SamlDeployment, Keycloak will resume
 * authenticating and authorizing the request.
 *
 * The easiest way to build a SamlDeployment is to use
 * DeploymentBuilder, passing the InputStream of an existing
 * keycloak-saml.xml to the build() method.
 *
 * @author Juraci Paixão Kröhling &lt;juraci at kroehling.de&gt;
 */
public interface SamlConfigResolver {

    /**
     * Resolves the SAML deployment that applies to the given request.
     *
     * @param request the current inbound request
     * @return the deployment Keycloak should use to authenticate and
     *         authorize this request
     */
    // Parameter renamed from "facade" to "request": its type is Request,
    // not a facade; the old name was misleading. The redundant "public"
    // modifier was dropped (interface members are implicitly public).
    SamlDeployment resolve(Request request);
}
apache-2.0
mr253727942/DSC
src/main/java/net/floodlightcontroller/core/IInfoProvider.java
1009
/** * Copyright 2011, Big Switch Networks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. **/ package net.floodlightcontroller.core; import java.util.Map; /** * * * @author Shudong Zhou */ public interface IInfoProvider { /** * Called when rest API requests information of a particular type * 当REST API请求一个特定类型的信息时被调用。 * @param type * @return */ public Map<String, Object> getInfo(String type); }
apache-2.0
sungsoo/optiq-project
core/src/test/java/net/hydromatic/optiq/test/SqlToRelConverterExtendedTest.java
2955
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.hydromatic.optiq.test; import net.hydromatic.optiq.SchemaPlus; import net.hydromatic.optiq.runtime.Hook; import net.hydromatic.optiq.tools.Frameworks; import org.eigenbase.rel.*; import org.eigenbase.relopt.RelOptCluster; import org.eigenbase.relopt.RelOptSchema; import org.eigenbase.test.SqlToRelConverterTest; import com.google.common.base.Function; import org.junit.After; import org.junit.Before; import java.io.IOException; /** * Runs {@link org.eigenbase.test.SqlToRelConverterTest} with extensions. */ public class SqlToRelConverterExtendedTest extends SqlToRelConverterTest { Hook.Closeable closeable; @Before public void before() { this.closeable = Hook.CONVERTED.addThread( new Function<RelNode, Void>() { public Void apply(RelNode a0) { foo(a0); return null; } }); } @After public void after() { if (this.closeable != null) { this.closeable.close(); this.closeable = null; } } public static void foo(RelNode rel) { // Convert rel tree to JSON. final RelJsonWriter writer = new RelJsonWriter(); rel.explain(writer); final String json = writer.asString(); // Find the schema. If there are no tables in the plan, we won't need one. 
final RelOptSchema[] schemas = {null}; rel.accept(new RelShuttleImpl() { @Override public RelNode visit(TableAccessRelBase scan) { schemas[0] = scan.getTable().getRelOptSchema(); return super.visit(scan); } }); // Convert JSON back to rel tree. Frameworks.withPlanner( new Frameworks.PlannerAction<Object>() { public Object apply(RelOptCluster cluster, RelOptSchema relOptSchema, SchemaPlus rootSchema) { final RelJsonReader reader = new RelJsonReader( cluster, schemas[0], rootSchema); try { RelNode x = reader.read(json); } catch (IOException e) { throw new RuntimeException(e); } return null; } }); } } // End SqlToRelConverterExtendedTest.java
apache-2.0
apache/incubator-shardingsphere
shardingsphere-test/shardingsphere-parser-test/src/main/java/org/apache/shardingsphere/test/sql/parser/parameterized/jaxb/cases/domain/statement/ddl/CreateDimensionStatementTestCase.java
1143
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.ddl;

import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.SQLParserTestCase;

/**
 * Create dimension statement test case.
 *
 * Marker JAXB case for the CREATE DIMENSION DDL statement: it adds no
 * attributes of its own and exists so the parameterized parser tests can
 * bind such statements to a concrete case type.
 */
public final class CreateDimensionStatementTestCase extends SQLParserTestCase {
}
apache-2.0
RoyZeng/smslib
smslib/src/main/java/org/smslib/gateway/http/bulksms/BulkSmsDE.java
325
package org.smslib.gateway.http.bulksms;

/**
 * Gateway for the German BulkSMS service (http://bulksms.de/). Reuses the
 * international BulkSMS HTTP implementation, pointed at the bulksms.de
 * endpoint.
 */
public class BulkSmsDE extends BulkSmsInternational
{
	/**
	 * @param gatewayId identifier for this gateway instance
	 * @param parms at least two parameters forwarded to
	 *        {@link BulkSmsInternational} (presumably the account
	 *        credentials -- TODO confirm against the superclass)
	 */
	public BulkSmsDE(String gatewayId, String... parms)
	{
		// super(...) must be the first statement, so the length check is
		// delegated to parm(): a short varargs array now fails with a clear
		// IllegalArgumentException instead of ArrayIndexOutOfBoundsException.
		super(gatewayId, parm(parms, 0), parm(parms, 1));
		this.operatorId = "bulksms-de";
		setDescription("BULKSMS DE (http://bulksms.de/)");
		setBaseUrl("http://bulksms.de:5567");
	}

	/** Returns parms[index], failing fast with a descriptive message if the
	 * array is null or too short. */
	private static String parm(String[] parms, int index)
	{
		if (parms == null || parms.length <= index)
		{
			throw new IllegalArgumentException("BulkSmsDE requires at least "
					+ (index + 1) + " gateway parameter(s), got "
					+ (parms == null ? 0 : parms.length));
		}
		return parms[index];
	}
}
apache-2.0
maxkondr/onos-porta
core/api/src/main/java/org/onosproject/net/packet/PacketStore.java
1668
/*
 * Copyright 2014-2015 Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.net.packet;

import org.onosproject.store.Store;

import java.util.Set;

/**
 * Manages routing of outbound packets.
 */
public interface PacketStore extends Store<PacketEvent, PacketStoreDelegate> {

    /**
     * Decides which instance should emit the packet and forwards the packet to
     * that instance. The relevant PacketManager is notified via the
     * PacketStoreDelegate that it should emit the packet.
     *
     * @param packet the packet to emit
     */
    void emit(OutboundPacket packet);

    /**
     * Registers a request for packets. If the registration succeeds the
     * manager can proceed; otherwise it should consider these packets
     * already requested and available in the system.
     *
     * @param request a packet request
     * @return true if the request was newly registered, false otherwise
     */
    boolean requestPackets(PacketRequest request);

    /**
     * Obtains all existing requests in the system.
     *
     * @return a set of packet requests
     */
    Set<PacketRequest> existingRequests();
}
apache-2.0
robertgeiger/incubator-geode
gemfire-core/src/test/java/com/gemstone/gemfire/cache/OperationJUnitTest.java
28984
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gemstone.gemfire.cache;

import org.junit.experimental.categories.Category;

import junit.framework.TestCase;

import com.gemstone.gemfire.cache.Operation;

import com.gemstone.gemfire.test.junit.categories.UnitTest;

/**
 * Unit tests for the boolean classification predicates of {@link Operation}
 * (isCreate, isUpdate, isLocal, isDistributed, ...). Each test method picks
 * one Operation constant and asserts the expected value of every predicate.
 */
@Category(UnitTest.class)
public class OperationJUnitTest extends TestCase {

  /** JUnit 3 style constructor taking the test name. */
  public OperationJUnitTest(String name) {
    super(name);
  }

  // No-arg constructor; some runners instantiate test cases reflectively
  // without a name.
  public OperationJUnitTest() {
  }

  protected void setUp() throws Exception {
    super.setUp();
  }

  protected void tearDown() throws Exception {
    super.tearDown();
  }

  /**
   * Check CREATE Operation.
   */
  public void testCREATE() {
    Operation op = Operation.CREATE;
    assertTrue(op.isCreate());
    assertFalse(op.isUpdate());
    assertFalse(op.isInvalidate());
    assertFalse(op.isDestroy());
    assertFalse(op.isPutAll());
    assertFalse(op.isRegionInvalidate());
    assertFalse(op.isRegionDestroy());
    assertFalse(op.isRegion());
    assertFalse(op.isLocal());
    assertTrue(op.isDistributed());
    assertTrue(op.isEntry());
    assertFalse(op.isExpiration());
    assertFalse(op.isLocalLoad());
    assertFalse(op.isNetLoad());
    assertFalse(op.isLoad());
    assertFalse(op.isNetSearch());
    assertFalse(op.isClose());
    assertFalse(op.isClear());
  }

  /**
   * Check PUTALL_CREATE Operation.
*/
  public void testPUTALL_CREATE() {
    checkFlags(Operation.PUTALL_CREATE, Flag.CREATE, Flag.PUT_ALL, Flag.DISTRIBUTED, Flag.ENTRY);
  }

  /** Check SEARCH_CREATE Operation. */
  public void testSEARCH_CREATE() {
    checkFlags(Operation.SEARCH_CREATE, Flag.CREATE, Flag.DISTRIBUTED, Flag.ENTRY, Flag.NET_SEARCH);
  }

  /** Check LOCAL_LOAD_CREATE Operation. */
  public void testLOCAL_LOAD_CREATE() {
    checkFlags(Operation.LOCAL_LOAD_CREATE, Flag.CREATE, Flag.DISTRIBUTED, Flag.ENTRY, Flag.LOCAL_LOAD, Flag.LOAD);
  }

  /** Check NET_LOAD_CREATE Operation. */
  public void testNET_LOAD_CREATE() {
    checkFlags(Operation.NET_LOAD_CREATE, Flag.CREATE, Flag.DISTRIBUTED, Flag.ENTRY, Flag.NET_LOAD, Flag.LOAD);
  }

  /** Check UPDATE Operation. */
  public void testUPDATE() {
    checkFlags(Operation.UPDATE, Flag.UPDATE, Flag.DISTRIBUTED, Flag.ENTRY);
  }

  /** Check PUTALL_UPDATE Operation. */
  public void testPUTALL_UPDATE() {
    checkFlags(Operation.PUTALL_UPDATE, Flag.UPDATE, Flag.PUT_ALL, Flag.DISTRIBUTED, Flag.ENTRY);
  }

  /** Check SEARCH_UPDATE Operation. */
  public void testSEARCH_UPDATE() {
    checkFlags(Operation.SEARCH_UPDATE, Flag.UPDATE, Flag.DISTRIBUTED, Flag.ENTRY, Flag.NET_SEARCH);
  }

  /** Check LOCAL_LOAD_UPDATE Operation. */
  public void testLOCAL_LOAD_UPDATE() {
    checkFlags(Operation.LOCAL_LOAD_UPDATE, Flag.UPDATE, Flag.DISTRIBUTED, Flag.ENTRY, Flag.LOCAL_LOAD, Flag.LOAD);
  }

  /** Check NET_LOAD_UPDATE Operation. */
  public void testNET_LOAD_UPDATE() {
    checkFlags(Operation.NET_LOAD_UPDATE, Flag.UPDATE, Flag.DISTRIBUTED, Flag.ENTRY, Flag.NET_LOAD, Flag.LOAD);
  }

  /** Check INVALIDATE Operation. */
  public void testINVALIDATE() {
    checkFlags(Operation.INVALIDATE, Flag.INVALIDATE, Flag.DISTRIBUTED, Flag.ENTRY);
  }

  /** Check LOCAL_INVALIDATE Operation. */
  public void testLOCAL_INVALIDATE() {
    checkFlags(Operation.LOCAL_INVALIDATE, Flag.INVALIDATE, Flag.LOCAL, Flag.ENTRY);
  }

  /** Check DESTROY Operation. */
  public void testDESTROY() {
    checkFlags(Operation.DESTROY, Flag.DESTROY, Flag.DISTRIBUTED, Flag.ENTRY);
  }

  /** Check REMOVEALL (REMOVEALL_DESTROY) Operation. */
  public void testREMOVEALL() {
    Operation op = Operation.REMOVEALL_DESTROY;
    // isRemoveAll was only asserted for this operation in the original
    // tests, so it is checked here rather than inside checkFlags.
    assertTrue(op.isRemoveAll());
    checkFlags(op, Flag.DESTROY, Flag.DISTRIBUTED, Flag.ENTRY);
  }

  /** Check LOCAL_DESTROY Operation. */
  public void testLOCAL_DESTROY() {
    checkFlags(Operation.LOCAL_DESTROY, Flag.DESTROY, Flag.LOCAL, Flag.ENTRY);
  }

  /** Check EVICT_DESTROY Operation. */
  public void testEVICT_DESTROY() {
    checkFlags(Operation.EVICT_DESTROY, Flag.DESTROY, Flag.LOCAL, Flag.ENTRY);
  }

  /** Check REGION_LOAD_SNAPSHOT Operation. */
  public void testREGION_LOAD_SNAPSHOT() {
    checkFlags(Operation.REGION_LOAD_SNAPSHOT, Flag.REGION_DESTROY, Flag.REGION, Flag.DISTRIBUTED);
  }

  /** Check REGION_LOCAL_DESTROY Operation. */
  public void testREGION_LOCAL_DESTROY() {
    checkFlags(Operation.REGION_LOCAL_DESTROY, Flag.REGION_DESTROY, Flag.REGION, Flag.LOCAL);
  }

  /** Check REGION_CREATE Operation. */
  public void testREGION_CREATE() {
    checkFlags(Operation.REGION_CREATE, Flag.REGION, Flag.LOCAL);
  }

  /** Check REGION_CLOSE Operation. */
  public void testREGION_CLOSE() {
    checkFlags(Operation.REGION_CLOSE, Flag.REGION_DESTROY, Flag.REGION, Flag.LOCAL, Flag.CLOSE);
  }

  /** Check REGION_DESTROY Operation. */
  public void testREGION_DESTROY() {
    checkFlags(Operation.REGION_DESTROY, Flag.REGION_DESTROY, Flag.REGION, Flag.DISTRIBUTED);
  }

  /** Check EXPIRE_DESTROY Operation. */
  public void testEXPIRE_DESTROY() {
    checkFlags(Operation.EXPIRE_DESTROY, Flag.DESTROY, Flag.DISTRIBUTED, Flag.ENTRY, Flag.EXPIRATION);
  }

  /** Check EXPIRE_LOCAL_DESTROY Operation. */
  public void testEXPIRE_LOCAL_DESTROY() {
    checkFlags(Operation.EXPIRE_LOCAL_DESTROY, Flag.DESTROY, Flag.LOCAL, Flag.ENTRY, Flag.EXPIRATION);
  }

  /** Check EXPIRE_INVALIDATE Operation. */
  public void testEXPIRE_INVALIDATE() {
    checkFlags(Operation.EXPIRE_INVALIDATE, Flag.INVALIDATE, Flag.DISTRIBUTED, Flag.ENTRY, Flag.EXPIRATION);
  }

  /** Check EXPIRE_LOCAL_INVALIDATE Operation. */
  public void testEXPIRE_LOCAL_INVALIDATE() {
    checkFlags(Operation.EXPIRE_LOCAL_INVALIDATE, Flag.INVALIDATE, Flag.LOCAL, Flag.ENTRY, Flag.EXPIRATION);
  }

  /** Check REGION_EXPIRE_DESTROY Operation. */
  public void testREGION_EXPIRE_DESTROY() {
    checkFlags(Operation.REGION_EXPIRE_DESTROY, Flag.REGION_DESTROY, Flag.REGION, Flag.DISTRIBUTED, Flag.EXPIRATION);
  }

  /** Boolean properties of an {@link Operation} asserted by these tests. */
  private enum Flag {
    CREATE, UPDATE, INVALIDATE, DESTROY, PUT_ALL, REGION_INVALIDATE,
    REGION_DESTROY, REGION, LOCAL, DISTRIBUTED, ENTRY, EXPIRATION,
    LOCAL_LOAD, NET_LOAD, LOAD, NET_SEARCH, CLOSE, CLEAR
  }

  /**
   * Asserts every boolean predicate of the given operation: flags listed in
   * expectedTrue must be true, all remaining flags must be false. Replaces
   * the 18 copy-pasted assertTrue/assertFalse lines that each test method
   * previously carried, asserting exactly the same values.
   */
  private static void checkFlags(Operation op, Flag... expectedTrue) {
    assertEquals("isCreate", has(expectedTrue, Flag.CREATE), op.isCreate());
    assertEquals("isUpdate", has(expectedTrue, Flag.UPDATE), op.isUpdate());
    assertEquals("isInvalidate", has(expectedTrue, Flag.INVALIDATE), op.isInvalidate());
    assertEquals("isDestroy", has(expectedTrue, Flag.DESTROY), op.isDestroy());
    assertEquals("isPutAll", has(expectedTrue, Flag.PUT_ALL), op.isPutAll());
    assertEquals("isRegionInvalidate", has(expectedTrue, Flag.REGION_INVALIDATE), op.isRegionInvalidate());
    assertEquals("isRegionDestroy", has(expectedTrue, Flag.REGION_DESTROY), op.isRegionDestroy());
    assertEquals("isRegion", has(expectedTrue, Flag.REGION), op.isRegion());
    assertEquals("isLocal", has(expectedTrue, Flag.LOCAL), op.isLocal());
    assertEquals("isDistributed", has(expectedTrue, Flag.DISTRIBUTED), op.isDistributed());
    assertEquals("isEntry", has(expectedTrue, Flag.ENTRY), op.isEntry());
    assertEquals("isExpiration", has(expectedTrue, Flag.EXPIRATION), op.isExpiration());
    assertEquals("isLocalLoad", has(expectedTrue, Flag.LOCAL_LOAD), op.isLocalLoad());
    assertEquals("isNetLoad", has(expectedTrue, Flag.NET_LOAD), op.isNetLoad());
    assertEquals("isLoad", has(expectedTrue, Flag.LOAD), op.isLoad());
    assertEquals("isNetSearch", has(expectedTrue, Flag.NET_SEARCH), op.isNetSearch());
    assertEquals("isClose", has(expectedTrue, Flag.CLOSE), op.isClose());
    assertEquals("isClear", has(expectedTrue, Flag.CLEAR), op.isClear());
  }

  /** Linear membership test over the varargs array; avoids adding any
   * java.util import to the file. */
  private static boolean has(Flag[] flags, Flag wanted) {
    for (Flag f : flags) {
      if (f == wanted) {
        return true;
      }
    }
    return false;
  }

  /**
   * Check REGION_EXPIRE_LOCAL_DESTROY Operation.
*/ public void testREGION_EXPIRE_LOCAL_DESTROY() { Operation op = Operation.REGION_EXPIRE_LOCAL_DESTROY; assertFalse(op.isCreate()); assertFalse(op.isUpdate()); assertFalse(op.isInvalidate()); assertFalse(op.isDestroy()); assertFalse(op.isPutAll()); assertFalse(op.isRegionInvalidate()); assertTrue(op.isRegionDestroy()); assertTrue(op.isRegion()); assertTrue(op.isLocal()); assertFalse(op.isDistributed()); assertFalse(op.isEntry()); assertTrue(op.isExpiration()); assertFalse(op.isLocalLoad()); assertFalse(op.isNetLoad()); assertFalse(op.isLoad()); assertFalse(op.isNetSearch()); assertFalse(op.isClose()); assertFalse(op.isClear()); } /** * Check REGION_EXPIRE_INVALIDATE Operation. */ public void testREGION_EXPIRE_INVALIDATE() { Operation op = Operation.REGION_EXPIRE_INVALIDATE; assertFalse(op.isCreate()); assertFalse(op.isUpdate()); assertFalse(op.isInvalidate()); assertFalse(op.isDestroy()); assertFalse(op.isPutAll()); assertTrue(op.isRegionInvalidate()); assertFalse(op.isRegionDestroy()); assertTrue(op.isRegion()); assertFalse(op.isLocal()); assertTrue(op.isDistributed()); assertFalse(op.isEntry()); assertTrue(op.isExpiration()); assertFalse(op.isLocalLoad()); assertFalse(op.isNetLoad()); assertFalse(op.isLoad()); assertFalse(op.isNetSearch()); assertFalse(op.isClose()); assertFalse(op.isClear()); } /** * Check REGION_EXPIRE_LOCAL_INVALIDATE Operation. 
*/ public void testREGION_EXPIRE_LOCAL_INVALIDATE() { Operation op = Operation.REGION_EXPIRE_LOCAL_INVALIDATE; assertFalse(op.isCreate()); assertFalse(op.isUpdate()); assertFalse(op.isInvalidate()); assertFalse(op.isDestroy()); assertFalse(op.isPutAll()); assertTrue(op.isRegionInvalidate()); assertFalse(op.isRegionDestroy()); assertTrue(op.isRegion()); assertTrue(op.isLocal()); assertFalse(op.isDistributed()); assertFalse(op.isEntry()); assertTrue(op.isExpiration()); assertFalse(op.isLocalLoad()); assertFalse(op.isNetLoad()); assertFalse(op.isLoad()); assertFalse(op.isNetSearch()); assertFalse(op.isClose()); assertFalse(op.isClear()); } /** * Check REGION_LOCAL_INVALIDATE Operation. */ public void testREGION_LOCAL_INVALIDATE() { Operation op = Operation.REGION_LOCAL_INVALIDATE; assertFalse(op.isCreate()); assertFalse(op.isUpdate()); assertFalse(op.isInvalidate()); assertFalse(op.isDestroy()); assertFalse(op.isPutAll()); assertTrue(op.isRegionInvalidate()); assertFalse(op.isRegionDestroy()); assertTrue(op.isRegion()); assertTrue(op.isLocal()); assertFalse(op.isDistributed()); assertFalse(op.isEntry()); assertFalse(op.isExpiration()); assertFalse(op.isLocalLoad()); assertFalse(op.isNetLoad()); assertFalse(op.isLoad()); assertFalse(op.isNetSearch()); assertFalse(op.isClose()); assertFalse(op.isClear()); } /** * Check REGION_INVALIDATE Operation. 
*/ public void testREGION_INVALIDATE() { Operation op = Operation.REGION_INVALIDATE; assertFalse(op.isCreate()); assertFalse(op.isUpdate()); assertFalse(op.isInvalidate()); assertFalse(op.isDestroy()); assertFalse(op.isPutAll()); assertTrue(op.isRegionInvalidate()); assertFalse(op.isRegionDestroy()); assertTrue(op.isRegion()); assertFalse(op.isLocal()); assertTrue(op.isDistributed()); assertFalse(op.isEntry()); assertFalse(op.isExpiration()); assertFalse(op.isLocalLoad()); assertFalse(op.isNetLoad()); assertFalse(op.isLoad()); assertFalse(op.isNetSearch()); assertFalse(op.isClose()); assertFalse(op.isClear()); } /** * Check REGION_CLEAR Operation. */ public void testREGION_CLEAR() { Operation op = Operation.REGION_CLEAR; assertFalse(op.isCreate()); assertFalse(op.isUpdate()); assertFalse(op.isInvalidate()); assertFalse(op.isDestroy()); assertFalse(op.isPutAll()); assertFalse(op.isRegionInvalidate()); assertFalse(op.isRegionDestroy()); assertTrue(op.isRegion()); assertFalse(op.isLocal()); assertTrue(op.isDistributed()); assertFalse(op.isEntry()); assertFalse(op.isExpiration()); assertFalse(op.isLocalLoad()); assertFalse(op.isNetLoad()); assertFalse(op.isLoad()); assertFalse(op.isNetSearch()); assertFalse(op.isClose()); assertTrue(op.isClear()); } /** * Check REGION_LOCAL_CLEAR Operation. 
*/ public void testREGION_LOCAL_CLEAR() { Operation op = Operation.REGION_LOCAL_CLEAR; assertFalse(op.isCreate()); assertFalse(op.isUpdate()); assertFalse(op.isInvalidate()); assertFalse(op.isDestroy()); assertFalse(op.isPutAll()); assertFalse(op.isRegionInvalidate()); assertFalse(op.isRegionDestroy()); assertTrue(op.isRegion()); assertTrue(op.isLocal()); assertFalse(op.isDistributed()); assertFalse(op.isEntry()); assertFalse(op.isExpiration()); assertFalse(op.isLocalLoad()); assertFalse(op.isNetLoad()); assertFalse(op.isLoad()); assertFalse(op.isNetSearch()); assertFalse(op.isClose()); assertTrue(op.isClear()); } /** * Check CACHE_CREATE Operation */ public void testCACHE_CREATE() { Operation op = Operation.CACHE_CREATE; assertFalse(op.isCreate()); assertFalse(op.isUpdate()); assertFalse(op.isInvalidate()); assertFalse(op.isDestroy()); assertFalse(op.isPutAll()); assertFalse(op.isRegionInvalidate()); assertFalse(op.isRegionDestroy()); assertTrue(op.isRegion()); assertTrue(op.isLocal()); assertFalse(op.isDistributed()); assertFalse(op.isEntry()); assertFalse(op.isExpiration()); assertFalse(op.isLocalLoad()); assertFalse(op.isNetLoad()); assertFalse(op.isLoad()); assertFalse(op.isNetSearch()); assertFalse(op.isClose()); assertFalse(op.isClear()); } /** * Check CACHE_CLOSE Operation. */ public void testCACHE_CLOSE() { Operation op = Operation.CACHE_CLOSE; assertFalse(op.isCreate()); assertFalse(op.isUpdate()); assertFalse(op.isInvalidate()); assertFalse(op.isDestroy()); assertFalse(op.isPutAll()); assertFalse(op.isRegionInvalidate()); assertTrue(op.isRegionDestroy()); assertTrue(op.isRegion()); assertTrue(op.isLocal()); assertFalse(op.isDistributed()); assertFalse(op.isEntry()); assertFalse(op.isExpiration()); assertFalse(op.isLocalLoad()); assertFalse(op.isNetLoad()); assertFalse(op.isLoad()); assertFalse(op.isNetSearch()); assertTrue(op.isClose()); assertFalse(op.isClear()); } /** * Check REGION_REINITIALIZE Operation. 
*/ public void testREGION_REINITIALIZE() { Operation op = Operation.REGION_REINITIALIZE; assertFalse(op.isCreate()); assertFalse(op.isUpdate()); assertFalse(op.isInvalidate()); assertFalse(op.isDestroy()); assertFalse(op.isPutAll()); assertFalse(op.isRegionInvalidate()); assertTrue(op.isRegionDestroy()); assertTrue(op.isRegion()); assertTrue(op.isLocal()); assertFalse(op.isDistributed()); assertFalse(op.isEntry()); assertFalse(op.isExpiration()); assertFalse(op.isLocalLoad()); assertFalse(op.isNetLoad()); assertFalse(op.isLoad()); assertFalse(op.isNetSearch()); assertFalse(op.isClose()); assertFalse(op.isClear()); } /** * Check UPDATE_VERSION Operation. */ public void testUPDATE_VERSION() { Operation op = Operation.UPDATE_VERSION_STAMP; assertFalse(op.isCreate()); assertFalse(op.isUpdate()); assertFalse(op.isInvalidate()); assertFalse(op.isDestroy()); assertFalse(op.isPutAll()); assertFalse(op.isRegionInvalidate()); assertFalse(op.isRegionDestroy()); assertFalse(op.isRegion()); assertFalse(op.isLocal()); assertTrue(op.isDistributed()); assertTrue(op.isEntry()); assertFalse(op.isExpiration()); assertFalse(op.isLocalLoad()); assertFalse(op.isNetLoad()); assertFalse(op.isLoad()); assertFalse(op.isNetSearch()); assertFalse(op.isClose()); assertFalse(op.isClear()); } }
apache-2.0
ryantxu/jeo
core/src/main/java/io/jeo/data/mem/MemTileDataset.java
4418
/* Copyright 2014 The jeo project. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.jeo.data.mem;

import java.io.IOException;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;

import io.jeo.data.Cursor;
import io.jeo.data.Driver;
import io.jeo.geom.Bounds;
import io.jeo.tile.Tile;
import io.jeo.tile.TileDataset;
import io.jeo.tile.TileGrid;
import io.jeo.tile.TilePyramid;
import io.jeo.util.Key;
import org.osgeo.proj4j.CoordinateReferenceSystem;

/**
 * A {@link TileDataset} held entirely in memory.
 * <p>
 * Tiles are stored in a three-dimensional array indexed as {@code [z][y][x]}; each
 * zoom level slice remains {@code null} until populated via {@link #put(int, Tile[][])}.
 */
public class MemTileDataset implements TileDataset {

    String name;
    TilePyramid pyramid;

    // tile storage, indexed [z][y][x]; a zoom level slice is null until put() fills it
    Tile[][][] tiles; // z, y, x

    public MemTileDataset(String name, TilePyramid pyramid) {
        this.name = name;
        this.pyramid = pyramid;
        // only the zoom dimension is allocated here; levels are filled by put()
        tiles = new Tile[pyramid.grids().size()][][];
    }

    @Override
    public Driver<?> driver() {
        return new Memory();
    }

    @Override
    public Map<Key<?>, Object> driverOptions() {
        return Collections.emptyMap();
    }

    @Override
    public String name() {
        return name;
    }

    @Override
    public CoordinateReferenceSystem crs() throws IOException {
        return pyramid.crs();
    }

    @Override
    public Bounds bounds() throws IOException {
        return pyramid.bounds();
    }

    @Override
    public TilePyramid pyramid() throws IOException {
        return pyramid;
    }

    /**
     * Returns the tile at the given coordinate, or <code>null</code> when the zoom level
     * is unknown, the coordinate is out of range, or no tiles were put() for the level.
     */
    @Override
    public Tile read(long z, long x, long y) throws IOException {
        TileGrid grid = pyramid.grid((int) z);
        if (grid == null) {
            // unknown zoom level - follow the method's existing null-returning contract
            //throw new IOException(String.format(Locale.ROOT,"no grid for zoom level %d", z));
            return null;
        }
        if (y < 0 || y >= grid.height()) {
            return null;
            //throw new IOException(String.format(Locale.ROOT,"y must be < %d", grid.height()));
        }
        if (x < 0 || x >= grid.width()) {
            return null;
            //throw new IOException(String.format(Locale.ROOT,"x must be < %d", grid.width()));
        }
        Tile[][] level = tiles[(int) z];
        if (level == null) {
            // fix: previously dereferenced a null slice (NPE) when put() was never
            // called for this zoom level; return null like the other misses above
            return null;
        }
        return level[(int) y][(int) x];
    }

    @Override
    public Cursor<Tile> read(final long z1, final long z2, final long x1, final long x2,
                             final long y1, final long y2) throws IOException {
        return new Cursor<Tile>() {
            int z = (int) z1;
            int y = (int) y1;
            int x = (int) x1;

            Tile next;

            @Override
            public boolean hasNext() throws IOException {
                // NOTE(review): hasNext() advances the position as a side effect, so it
                // must be called exactly once before each next(); a second hasNext() call
                // skips a tile. It also advances before the first next(), so the tile at
                // (z1, y1, x1) is never returned, and z2 is exclusive (z < z2). Preserved
                // as-is pending confirmation of the intended iteration contract.
                x++;
                if (x > x2) {
                    x = (int) x1;
                    y++;
                    if (y > y2) {
                        y = (int) y1;
                        z++;
                    }
                }
                return z < z2;
            }

            @Override
            public Tile next() throws IOException {
                return tiles[z][y][x];
            }

            @Override
            public void close() {
            }
        };
    }

    /**
     * Installs the tile matrix for zoom level <code>z</code>.
     *
     * @param z the zoom level, 0 &lt;= z &lt; pyramid depth
     * @param tiles tile matrix indexed [y][x], dimensions must match the level's grid
     *
     * @throws IndexOutOfBoundsException if z is out of range or a row width mismatches
     * @throws IllegalArgumentException if the number of rows mismatches the grid height
     */
    public void put(int z, Tile[][] tiles) {
        // fix: valid indices into this.tiles are 0 .. length-1; the previous check
        // used "z > length" which let z == length through to an unchecked AIOOBE below
        if (z >= this.tiles.length) {
            throw new IndexOutOfBoundsException(String.format(Locale.ROOT,
                "zoom level %d greater than pyramid depth %d", z, this.tiles.length));
        }

        TileGrid grid = pyramid.grid(z);
        if (tiles.length != grid.height()) {
            throw new IllegalArgumentException(String.format(Locale.ROOT,
                "number of vertical tiles %d != grid height %d", tiles.length, grid.height()));
        }

        for (Tile[] row : tiles) {
            if (row.length != grid.width()) {
                // fix: report the offending row's width (was tiles.length, the row count)
                throw new IndexOutOfBoundsException(String.format(Locale.ROOT,
                    "number of horizontal tiles %d != grid width %d", row.length, grid.width()));
            }
        }

        this.tiles[z] = tiles;
    }

    @Override
    public void close() {
    }
}
apache-2.0
AndreasAbdi/jackrabbit-oak
oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/document/rdb/RDBBlobStore.java
23690
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.plugins.document.rdb;

import java.io.Closeable;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

import javax.sql.DataSource;

import org.apache.jackrabbit.oak.commons.StringUtils;
import org.apache.jackrabbit.oak.plugins.blob.CachingBlobStore;
import org.apache.jackrabbit.oak.plugins.document.DocumentStoreException;
import org.apache.jackrabbit.oak.spi.blob.AbstractBlobStore;
import org.apache.jackrabbit.oak.util.OakVersion;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.AbstractIterator;

// Blob store backed by two relational tables: a DATA table holding the raw block
// bytes keyed by content hash, and a META table holding level/last-modified info
// used for mark-and-sweep garbage collection.
public class RDBBlobStore extends CachingBlobStore implements Closeable {

    /**
     * Creates a {@linkplain RDBBlobStore} instance using the provided
     * {@link DataSource} using the given {@link RDBOptions}.
     */
    public RDBBlobStore(DataSource ds, RDBOptions options) {
        try {
            initialize(ds, options);
        } catch (Exception ex) {
            // initialization failures (missing tables, bad datasource, ...) are
            // surfaced as the store's generic unchecked exception type
            throw new DocumentStoreException("initializing RDB blob store", ex);
        }
    }

    /**
     * Creates a {@linkplain RDBBlobStore} instance using the provided
     * {@link DataSource} using default {@link RDBOptions}.
     */
    public RDBBlobStore(DataSource ds) {
        this(ds, new RDBOptions());
    }

    // Drops any tables this instance created (when dropTablesOnClose was set),
    // then closes the connection handler. Drop failures are logged and ignored.
    @Override
    public void close() {
        if (!this.tablesToBeDropped.isEmpty()) {
            LOG.debug("attempting to drop: " + this.tablesToBeDropped);
            for (String tname : this.tablesToBeDropped) {
                Connection con = null;
                try {
                    con = this.ch.getRWConnection();
                    Statement stmt = null;
                    try {
                        stmt = con.createStatement();
                        stmt.execute("drop table " + tname);
                        stmt.close();
                        stmt = null;
                        con.commit();
                    } catch (SQLException ex) {
                        this.ch.closeStatement(stmt);
                        // NOTE(review): logged on failure, but the message reads like the
                        // pre-attempt message ("attempting to drop") - confirm intent
                        LOG.debug("attempting to drop: " + tname);
                    }
                } catch (SQLException ex) {
                    LOG.debug("attempting to drop: " + tname);
                } finally {
                    try {
                        if (con != null) {
                            con.close();
                        }
                    } catch (SQLException ex) {
                        LOG.debug("on close ", ex);
                    }
                }
            }
        }
        try {
            this.ch.close();
        } catch (IOException ex) {
            LOG.error("closing connection handler", ex);
        }
    }

    // Best-effort leak detection: if the store is finalized without close() and
    // debug logging captured a creation call stack, log that stack.
    // NOTE(review): the message says "RDBDocumentStore" - looks like a copy/paste
    // from the document store class; confirm before relying on log greps.
    @Override
    protected void finalize() {
        if (!this.ch.isClosed() && this.callStack != null) {
            LOG.debug("finalizing RDBDocumentStore that was not disposed", this.callStack);
        }
    }

    private static final Logger LOG = LoggerFactory.getLogger(RDBBlobStore.class);

    // ID size we need to support; is 2 * (hex) size of digest length
    protected static final int IDSIZE;

    static {
        try {
            MessageDigest md = MessageDigest.getInstance(AbstractBlobStore.HASH_ALGORITHM);
            IDSIZE = md.getDigestLength() * 2;
        } catch (NoSuchAlgorithmException ex) {
            // without the configured digest the store cannot build block IDs at all
            LOG.error("can't determine digest length for blob store", ex);
            throw new RuntimeException(ex);
        }
    }

    // creation call stack, captured only when debug logging is enabled (see finalize())
    private Exception callStack;

    protected RDBConnectionHandler ch;

    // from options
    protected String tnData;
    protected String tnMeta;

    // tables created by this instance that must be dropped in close()
    private Set<String> tablesToBeDropped = new HashSet<String>();

    // Resolves table names, checks the datasource's isolation level and DB/driver
    // versions, probes for the DATA/META tables and creates them when missing.
    private void initialize(DataSource ds, RDBOptions options) throws Exception {

        this.tnData = RDBJDBCTools.createTableName(options.getTablePrefix(), "DATASTORE_DATA");
        this.tnMeta = RDBJDBCTools.createTableName(options.getTablePrefix(), "DATASTORE_META");

        this.ch = new RDBConnectionHandler(ds);
        Connection con = this.ch.getRWConnection();

        int isolation = con.getTransactionIsolation();
        String isolationDiags = RDBJDBCTools.isolationLevelToString(isolation);
        if (isolation != Connection.TRANSACTION_READ_COMMITTED) {
            LOG.info("Detected transaction isolation level " + isolationDiags + " is "
                    + (isolation < Connection.TRANSACTION_READ_COMMITTED ? "lower" : "higher") + " than expected "
                    + RDBJDBCTools.isolationLevelToString(Connection.TRANSACTION_READ_COMMITTED)
                    + " - check datasource configuration");
        }

        DatabaseMetaData md = con.getMetaData();

        RDBBlobStoreDB db = RDBBlobStoreDB.getValue(md.getDatabaseProductName());
        String versionDiags = db.checkVersion(md);
        if (!versionDiags.isEmpty()) {
            LOG.info(versionDiags);
        }

        String dbDesc = String.format("%s %s (%d.%d)", md.getDatabaseProductName(), md.getDatabaseProductVersion(),
                md.getDatabaseMajorVersion(), md.getDatabaseMinorVersion());
        String driverDesc = String.format("%s %s (%d.%d)", md.getDriverName(), md.getDriverVersion(),
                md.getDriverMajorVersion(), md.getDriverMinorVersion());
        String dbUrl = md.getURL();

        List<String> tablesCreated = new ArrayList<String>();
        List<String> tablesPresent = new ArrayList<String>();

        Statement createStatement = null;

        try {
            for (String tableName : new String[] { this.tnData, this.tnMeta }) {
                PreparedStatement checkStatement = null;
                try {
                    // probe the table with a trivial lookup; an exception means it is absent
                    checkStatement = con.prepareStatement("select ID from " + tableName + " where ID = ?");
                    checkStatement.setString(1, "0");
                    checkStatement.executeQuery();
                    checkStatement.close();
                    checkStatement = null;
                    con.commit();
                    tablesPresent.add(tableName);
                } catch (SQLException ex) {
                    this.ch.closeStatement(checkStatement);

                    // table does not appear to exist
                    con.rollback();

                    createStatement = con.createStatement();

                    if (this.tnMeta.equals(tableName)) {
                        String ct = db.getMetaTableCreationStatement(tableName);
                        createStatement.execute(ct);
                    } else {
                        String ct = db.getDataTableCreationStatement(tableName);
                        createStatement.execute(ct);
                    }

                    createStatement.close();
                    createStatement = null;

                    con.commit();

                    tablesCreated.add(tableName);
                }
            }

            if (options.isDropTablesOnClose()) {
                tablesToBeDropped.addAll(tablesCreated);
            }

            LOG.info("RDBBlobStore (" + OakVersion.getVersion() + ") instantiated for database " + dbDesc
                    + ", using driver: " + driverDesc + ", connecting to: " + dbUrl + ", transaction isolation level: "
                    + isolationDiags);
            if (!tablesPresent.isEmpty()) {
                LOG.info("Tables present upon startup: " + tablesPresent);
            }
            if (!tablesCreated.isEmpty()) {
                LOG.info("Tables created upon startup: " + tablesCreated
                        + (options.isDropTablesOnClose() ? " (will be dropped on exit)" : ""));
            }

            this.callStack = LOG.isDebugEnabled() ? new Exception("call stack of RDBBlobStore creation") : null;
        } finally {
            this.ch.closeStatement(createStatement);
            this.ch.closeConnection(con);
        }
    }

    // lower bound used by the mark phase of garbage collection; 0 means marking is off
    private long minLastModified;

    @Override
    protected void storeBlock(byte[] digest, int level, byte[] data) throws IOException {
        try {
            storeBlockInDatabase(digest, level, data);
        } catch (SQLException e) {
            throw new IOException(e);
        }
    }

    // Upserts a block: refresh the META timestamp if the record exists, otherwise
    // insert into DATA and then META, recovering from races/partial state where
    // the DATA row already exists.
    private void storeBlockInDatabase(byte[] digest, int level, byte[] data) throws SQLException {

        String id = StringUtils.convertBytesToHex(digest);
        cache.put(id, data);

        Connection con = this.ch.getRWConnection();

        try {
            long now = System.currentTimeMillis();
            PreparedStatement prep = con.prepareStatement("update " + this.tnMeta + " set LASTMOD = ? where ID = ?");
            int count;
            try {
                prep.setLong(1, now);
                prep.setString(2, id);
                count = prep.executeUpdate();
            } catch (SQLException ex) {
                LOG.error("trying to update metadata", ex);
                throw new RuntimeException("trying to update metadata", ex);
            } finally {
                prep.close();
            }
            if (count == 0) {
                // block isn't known to the META table yet; insert the data first
                try {
                    prep = con.prepareStatement("insert into " + this.tnData + " (ID, DATA) values(?, ?)");
                    try {
                        prep.setString(1, id);
                        prep.setBytes(2, data);
                        prep.execute();
                    } finally {
                        prep.close();
                    }
                } catch (SQLException ex) {
                    this.ch.rollbackConnection(con);
                    // the insert failed although it should have succeeded; see whether the blob already exists
                    prep = con.prepareStatement("select DATA from " + this.tnData + " where ID = ?");
                    byte[] dbdata = null;
                    try {
                        prep.setString(1, id);
                        ResultSet rs = prep.executeQuery();
                        if (rs.next()) {
                            dbdata = rs.getBytes(1);
                        }
                    } finally {
                        prep.close();
                    }

                    if (dbdata == null) {
                        // insert failed although record isn't there
                        String message = "insert document failed for id " + id + " with length " + data.length
                                + " (check max size of datastore_data.data)";
                        LOG.error(message, ex);
                        throw new RuntimeException(message, ex);
                    } else if (!Arrays.equals(data, dbdata)) {
                        // record is there but contains different data
                        String message = "DATA table already contains blob for id " + id
                                + ", but the actual data differs (lengths: " + data.length + ", " + dbdata.length + ")";
                        LOG.error(message, ex);
                        throw new RuntimeException(message, ex);
                    } else {
                        // just recover
                        LOG.info("recovered from DB inconsistency for id " + id
                                + ": meta record was missing (impact will be minor performance degradation)");
                    }
                }
                // now add the META record; a duplicate-key failure here just means a
                // concurrent writer beat us to it
                try {
                    prep = con.prepareStatement("insert into " + this.tnMeta + " (ID, LVL, LASTMOD) values(?, ?, ?)");
                    try {
                        prep.setString(1, id);
                        prep.setInt(2, level);
                        prep.setLong(3, now);
                        prep.execute();
                    } finally {
                        prep.close();
                    }
                } catch (SQLException e) {
                    // already exists - ok
                    LOG.debug("inserting meta record for id " + id, e);
                }
            }
        } finally {
            con.commit();
            this.ch.closeConnection(con);
        }
    }
    // needed in test
    // Fetches a raw block by digest, bypassing the cache; throws when absent.
    protected byte[] readBlockFromBackend(byte[] digest) throws Exception {
        String id = StringUtils.convertBytesToHex(digest);

        Connection con = this.ch.getROConnection();
        byte[] data;

        try {
            PreparedStatement prep = con.prepareStatement("select DATA from " + this.tnData + " where ID = ?");
            try {
                prep.setString(1, id);
                ResultSet rs = prep.executeQuery();
                if (!rs.next()) {
                    throw new IOException("Datastore block " + id + " not found");
                }
                data = rs.getBytes(1);
            } finally {
                prep.close();
            }
        } finally {
            con.commit();
            this.ch.closeConnection(con);
        }
        return data;
    }

    // Reads a block through the cache and returns the bytes starting at the
    // requested position within the block (the whole block when pos == 0).
    @Override
    protected byte[] readBlockFromBackend(BlockId blockId) throws Exception {

        String id = StringUtils.convertBytesToHex(blockId.getDigest());
        byte[] data = cache.get(id);

        if (data == null) {
            Connection con = this.ch.getROConnection();

            try {
                PreparedStatement prep = con.prepareStatement("select DATA from " + this.tnData + " where ID = ?");
                try {
                    prep.setString(1, id);
                    ResultSet rs = prep.executeQuery();
                    if (!rs.next()) {
                        throw new IOException("Datastore block " + id + " not found");
                    }
                    data = rs.getBytes(1);
                } finally {
                    prep.close();
                }
                cache.put(id, data);
            } finally {
                con.commit();
                this.ch.closeConnection(con);
            }
        }
        // System.out.println("    read block " + id + " blockLen: " +
        // data.length + " [0]: " + data[0]);
        if (blockId.getPos() == 0) {
            return data;
        }
        int len = (int) (data.length - blockId.getPos());
        if (len < 0) {
            // position beyond the end of the block yields an empty array
            return new byte[0];
        }
        byte[] d2 = new byte[len];
        System.arraycopy(data, (int) blockId.getPos(), d2, 0, len);
        return d2;
    }

    // Starts the mark phase: everything last modified before this instant is a
    // sweep candidate unless marked again.
    @Override
    public void startMark() throws IOException {
        minLastModified = System.currentTimeMillis();
        markInUse();
    }

    @Override
    protected boolean isMarkEnabled() {
        return minLastModified != 0;
    }

    // Marks a block as in use by bumping its LASTMOD, but only when the stored
    // LASTMOD is older than the mark threshold.
    @Override
    protected void mark(BlockId blockId) throws Exception {
        Connection con = this.ch.getRWConnection();
        PreparedStatement prep = null;
        try {
            if (minLastModified == 0) {
                return;
            }
            String id = StringUtils.convertBytesToHex(blockId.getDigest());
            prep = con.prepareStatement("update " + this.tnMeta + " set LASTMOD = ? where ID = ? and LASTMOD < ?");
            prep.setLong(1, System.currentTimeMillis());
            prep.setString(2, id);
            prep.setLong(3, minLastModified);
            prep.executeUpdate();
            prep.close();
        } finally {
            this.ch.closeStatement(prep);
            con.commit();
            this.ch.closeConnection(con);
        }
    }

    @Override
    public int sweep() throws IOException {
        try {
            return sweepFromDatabase();
        } catch (SQLException e) {
            throw new IOException(e);
        }
    }

    // Sweep phase: deletes META and DATA rows for every ID whose LASTMOD is still
    // older than the mark threshold, then disables marking. Returns the count of
    // swept IDs.
    private int sweepFromDatabase() throws SQLException {
        Connection con = this.ch.getRWConnection();
        PreparedStatement prepCheck = null, prepDelMeta = null, prepDelData = null;
        ResultSet rs = null;
        try {
            int count = 0;
            prepCheck = con.prepareStatement("select ID from " + this.tnMeta + " where LASTMOD < ?");
            prepCheck.setLong(1, minLastModified);
            rs = prepCheck.executeQuery();
            ArrayList<String> ids = new ArrayList<String>();
            while (rs.next()) {
                ids.add(rs.getString(1));
            }
            rs.close();
            rs = null;
            prepCheck.close();
            prepCheck = null;

            prepDelMeta = con.prepareStatement("delete from " + this.tnMeta + " where ID = ?");
            prepDelData = con.prepareStatement("delete from " + this.tnData + " where ID = ?");

            for (String id : ids) {
                prepDelMeta.setString(1, id);
                prepDelMeta.execute();
                prepDelData.setString(1, id);
                prepDelData.execute();
                count++;
            }
            prepDelMeta.close();
            prepDelMeta = null;
            prepDelData.close();
            prepDelData = null;
            minLastModified = 0;
            return count;
        } finally {
            this.ch.closeResultSet(rs);
            this.ch.closeStatement(prepCheck);
            this.ch.closeStatement(prepDelMeta);
            this.ch.closeStatement(prepDelData);
            con.commit();
            this.ch.closeConnection(con);
        }
    }

    // Deletes the given chunk IDs (optionally only those not modified after
    // maxLastModifiedTime) from both tables. Returns the number of META rows
    // deleted. NOTE(review): all IDs are bound into a single IN (...) list, so very
    // large id lists may exceed database placeholder limits - confirm caller batching.
    @Override
    public long countDeleteChunks(List<String> chunkIds, long maxLastModifiedTime) throws Exception {
        long count = 0;
        // sanity check
        if (chunkIds.isEmpty()) {
            // sanity check, nothing to do
            return count;
        }

        Connection con = this.ch.getRWConnection();
        PreparedStatement prepMeta = null;
        PreparedStatement prepData = null;
        try {
            StringBuilder inClause = new StringBuilder();
            int batch = chunkIds.size();
            for (int i = 0; i < batch; i++) {
                inClause.append('?');
                if (i != batch - 1) {
                    inClause.append(',');
                }
            }

            if (maxLastModifiedTime > 0) {
                prepMeta = con.prepareStatement("delete from " + this.tnMeta + " where ID in (" + inClause.toString()
                        + ") and LASTMOD <= ?");
                prepMeta.setLong(batch + 1, maxLastModifiedTime);

                // only delete DATA rows whose META record is gone (or new enough to survive)
                prepData = con.prepareStatement("delete from " + this.tnData + " where ID in (" + inClause.toString()
                        + ") and not exists(select * from " + this.tnMeta + " m where ID = m.ID and m.LASTMOD <= ?)");
                prepData.setLong(batch + 1, maxLastModifiedTime);
            } else {
                prepMeta = con.prepareStatement("delete from " + this.tnMeta + " where ID in (" + inClause.toString() + ")");
                prepData = con.prepareStatement("delete from " + this.tnData + " where ID in (" + inClause.toString() + ")");
            }

            for (int idx = 0; idx < batch; idx++) {
                prepMeta.setString(idx + 1, chunkIds.get(idx));
                prepData.setString(idx + 1, chunkIds.get(idx));
            }

            count = prepMeta.executeUpdate();
            prepData.execute();
            prepMeta.close();
            prepMeta = null;
            prepData.close();
            prepData = null;
        } finally {
            this.ch.closeStatement(prepMeta);
            this.ch.closeStatement(prepData);
            con.commit();
            this.ch.closeConnection(con);
        }

        return count;
    }

    @Override
    public Iterator<String> getAllChunkIds(long maxLastModifiedTime) throws Exception {
        return new ChunkIdIterator(this.ch, maxLastModifiedTime, this.tnMeta);
    }

    /**
     * Reads chunk IDs in batches.
     */
    private static class ChunkIdIterator extends AbstractIterator<String> {

        private long maxLastModifiedTime;
        private RDBConnectionHandler ch;
        // page size for each refill query
        private static int BATCHSIZE = 1024 * 64;
        // IDs fetched by the last refill, consumed front-to-back by computeNext()
        private List<String> results = new LinkedList<String>();
        // highest ID returned so far; keyset pagination resumes after it
        private String lastId = null;
        private String metaTable;

        public ChunkIdIterator(RDBConnectionHandler ch, long maxLastModifiedTime, String metaTable) {
            this.maxLastModifiedTime = maxLastModifiedTime;
            this.ch = ch;
            this.metaTable = metaTable;
        }

        @Override
        protected String computeNext() {
            if (!results.isEmpty()) {
                return results.remove(0);
            } else {
                // need to refill
                if (refill()) {
                    return computeNext();
                } else {
                    return endOfData();
                }
            }
        }

        // Queries the next page of IDs ordered by ID, resuming after lastId.
        // Returns false (ending iteration) on SQL errors or when no rows remain.
        private boolean refill() {
            StringBuffer query = new StringBuffer();
            query.append("select ID from " + metaTable);
            if (maxLastModifiedTime > 0) {
                query.append(" where LASTMOD <= ?");
                if (lastId != null) {
                    query.append(" and ID > ?");
                }
            } else {
                if (lastId != null) {
                    query.append(" where ID > ?");
                }
            }
            query.append(" order by ID");

            Connection connection = null;
            try {
                connection = this.ch.getROConnection();
                PreparedStatement prep = null;
                ResultSet rs = null;
                try {
                    prep = connection.prepareStatement(query.toString());
                    int idx = 1;
                    if (maxLastModifiedTime > 0) {
                        prep.setLong(idx++, maxLastModifiedTime);
                    }
                    if (lastId != null) {
                        prep.setString(idx, lastId);
                    }
                    prep.setFetchSize(BATCHSIZE);
                    rs = prep.executeQuery();
                    while (rs.next()) {
                        lastId = rs.getString(1);
                        results.add(lastId);
                    }
                    rs.close();
                    rs = null;
                    return !results.isEmpty();
                } finally {
                    this.ch.closeResultSet(rs);
                    this.ch.closeStatement(prep);
                    connection.commit();
                    this.ch.closeConnection(connection);
                }
            } catch (SQLException ex) {
                LOG.debug("error executing ID lookup", ex);
                this.ch.rollbackConnection(connection);
                this.ch.closeConnection(connection);
                return false;
            }
        }
    }
}
apache-2.0
EnMasseProject/enmasse
systemtests/src/main/java/io/enmasse/systemtest/selenium/resources/BindingSecretData.java
4749
/*
 * Copyright 2018, EnMasse authors.
 * License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
 */
package io.enmasse.systemtest.selenium.resources;

import io.enmasse.systemtest.UserCredentials;
import io.vertx.core.json.JsonObject;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;

import java.util.List;

/**
 * Value object holding the credential and endpoint data of a service-binding secret,
 * populated either by scraping the console's secret view ({@link WebElement}) or from
 * a binding response payload ({@link JsonObject}).
 */
public class BindingSecretData {
    private String id;
    private String externalMessagingHost;
    private String externalMessagingPort;
    private String messagingCert;
    private String messagingHost;
    private String username;
    private String password;
    private String console;
    private String externalMqttHost;
    private String externalMqttPort;
    private String messagingAmqpPort;
    private String messagingAmqpsPort;
    private String mqttCert;
    private String mqttHost;
    // field renamed from the misspelled "mqqtPort"; the public getter getMqqtPort()
    // keeps its original name for caller compatibility
    private String mqttPort;
    private String mqttsPort;

    /**
     * Builds the secret data by scraping the rendered secret list in the console UI.
     *
     * @param item the container element holding the "image-source-item" entries
     */
    public BindingSecretData(WebElement item) {
        List<WebElement> data = item.findElements(By.className("image-source-item"));
        this.externalMessagingHost = parseDataFromSecret(data, "externalMessagingHost");
        this.externalMessagingPort = parseDataFromSecret(data, "externalMessagingPort");
        this.messagingCert = parseDataFromSecret(data, "messagingCert.pem");
        this.messagingHost = parseDataFromSecret(data, "messagingHost");
        this.username = parseDataFromSecret(data, "username");
        this.password = parseDataFromSecret(data, "password");
        this.console = parseDataFromSecret(data, "console");
        this.externalMqttHost = parseDataFromSecret(data, "externalMqttHost");
        this.externalMqttPort = parseDataFromSecret(data, "externalMqttPort");
        this.messagingAmqpPort = parseDataFromSecret(data, "messagingAmqpPort");
        this.messagingAmqpsPort = parseDataFromSecret(data, "messagingAmqpsPort");
        this.mqttCert = parseDataFromSecret(data, "mqttCert.pem");
        this.mqttHost = parseDataFromSecret(data, "mqttHost");
        // NOTE(review): the doubled "mqttMqtt*" key names look odd but match the
        // original lookup keys - confirm against the secret's actual entry names
        this.mqttPort = parseDataFromSecret(data, "mqttMqttPort");
        this.mqttsPort = parseDataFromSecret(data, "mqttMqttsPort");
    }

    /**
     * Builds a partial secret data object from a binding response payload; only the
     * credentials and messaging host/cert fields are populated.
     *
     * @param binding JSON payload containing a "credentials" object
     * @param id identifier of the binding this secret belongs to
     */
    public BindingSecretData(JsonObject binding, String id) {
        JsonObject credentials = binding.getJsonObject("credentials");
        this.id = id;
        this.username = credentials.getString("username");
        this.password = credentials.getString("password");
        this.messagingHost = credentials.getString("messagingHost");
        this.messagingCert = credentials.getString("messagingCert.pem");
    }

    public String getExternalMessagingHost() {
        return externalMessagingHost;
    }

    public String getExternalMessagingPort() {
        return externalMessagingPort;
    }

    public String getMessagingCert() {
        return messagingCert;
    }

    public String getMessagingHost() {
        return messagingHost;
    }

    public String getUsername() {
        return username;
    }

    public String getPassword() {
        return password;
    }

    /** Convenience accessor combining username and password. */
    public UserCredentials getCredentials() {
        return new UserCredentials(username, password);
    }

    public String getConsole() {
        return console;
    }

    public String getExternalMqttHost() {
        return externalMqttHost;
    }

    public String getExternalMqttPort() {
        return externalMqttPort;
    }

    public String getMessagingAmqpPort() {
        return messagingAmqpPort;
    }

    public String getMessagingAmqpsPort() {
        return messagingAmqpsPort;
    }

    public String getMqttCert() {
        return mqttCert;
    }

    public String getMqttHost() {
        return mqttHost;
    }

    // getter name preserved (including the historical typo) for caller compatibility
    public String getMqqtPort() {
        return mqttPort;
    }

    public String getMqttsPort() {
        return mqttsPort;
    }

    public String getId() {
        return id;
    }

    /**
     * Finds the entry whose &lt;dt&gt; label equals {@code dataName} and returns its value,
     * preferring an &lt;input&gt;'s value attribute and falling back to &lt;pre&gt; text.
     * Returns null when no entry matches.
     * (Return type narrowed from Object to String - every caller previously cast the
     * result to String anyway; the method is private, so the change is interface-safe.)
     */
    private String parseDataFromSecret(List<WebElement> data, String dataName) {
        for (WebElement d : data) {
            if (d.findElement(By.tagName("dt")).getText().equals(dataName)) {
                try {
                    return d.findElement(By.tagName("input")).getAttribute("value");
                } catch (Exception ex) {
                    // no <input> rendered for this entry (e.g. certificates); use the <pre> block
                    return d.findElement(By.tagName("pre")).getText();
                }
            }
        }
        return null;
    }

    @Override
    public String toString() {
        return String.format("Binding secret data username: %s, password %s, external: %s:%s",
                username,
                password,
                externalMessagingHost,
                externalMessagingPort);
    }
}
apache-2.0
vonZeppelin/planning-poker
src/main/java/org/lbogdanov/poker/web/plugin/CustomScrollbarPlugin.java
2567
/**
 * Copyright 2012 Leonid Bogdanov
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.lbogdanov.poker.web.plugin;

import java.util.Arrays;

import org.apache.wicket.markup.head.CssHeaderItem;
import org.apache.wicket.markup.head.HeaderItem;
import org.apache.wicket.markup.head.JavaScriptHeaderItem;
import org.apache.wicket.request.resource.CssResourceReference;
import org.apache.wicket.request.resource.JavaScriptResourceReference;
import org.apache.wicket.request.resource.ResourceReference;
import org.apache.wicket.resource.JQueryPluginResourceReference;

import com.google.common.collect.Iterables;

/**
 * A <code>ResourceReference</code> for jQuery custom scrollbar plugin.
 *
 * @author Leonid Bogdanov
 */
public class CustomScrollbarPlugin extends JQueryPluginResourceReference {

    // NOTE(review): Wicket's ResourceReference hierarchy is Serializable; declare an
    // explicit serialVersionUID instead of relying on the compiler-generated one.
    private static final long serialVersionUID = 1L;

    /** Eagerly-created singleton — the reference is stateless and safely shareable. */
    private static final CustomScrollbarPlugin INSTANCE = new CustomScrollbarPlugin();

    // Companion resources the scrollbar script depends on: the jQuery mousewheel
    // plugin and the plugin's stylesheet.
    private static final ResourceReference MOUSEWHEEL =
            new JavaScriptResourceReference(CustomScrollbarPlugin.class, "jquery.mousewheel.js");
    private static final ResourceReference CSS =
            new CssResourceReference(CustomScrollbarPlugin.class, "jquery.mCustomScrollbar.css");

    /**
     * Returns a single instance of jQuery custom scrollbar plugin resource reference.
     *
     * @return the single instance
     */
    public static CustomScrollbarPlugin get() {
        return INSTANCE;
    }

    /**
     * {@inheritDoc}
     *
     * Adds the mousewheel script and the plugin stylesheet on top of the
     * jQuery dependency contributed by the superclass.
     */
    @Override
    public Iterable<? extends HeaderItem> getDependencies() {
        return Iterables.concat(super.getDependencies(),
                                Arrays.<HeaderItem>asList(JavaScriptHeaderItem.forReference(MOUSEWHEEL),
                                                          CssHeaderItem.forReference(CSS)));
    }

    /** Private — use {@link #get()} to obtain the shared instance. */
    private CustomScrollbarPlugin() {
        super(CustomScrollbarPlugin.class, "jquery.mCustomScrollbar.js");
    }
}
apache-2.0